diff --git a/packages/codegen/.telescope.json b/packages/codegen/.telescope.json new file mode 100644 index 00000000..100bd5e4 --- /dev/null +++ b/packages/codegen/.telescope.json @@ -0,0 +1,17 @@ +{ + "protoDirs": [ + "./proto" + ], + "outPath": "./src", + "options": { + "aminoEncoding": { + "enabled": false + }, + "lcdClients": { + "enabled": true + }, + "rpcClients": { + "enabled": true + } + } +} \ No newline at end of file diff --git a/packages/codegen/dist/amino/amino.d.ts b/packages/codegen/dist/amino/amino.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/packages/codegen/dist/amino/amino.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/codegen/dist/amino/bundle.d.ts b/packages/codegen/dist/amino/bundle.d.ts new file mode 100644 index 00000000..67f7ba53 --- /dev/null +++ b/packages/codegen/dist/amino/bundle.d.ts @@ -0,0 +1 @@ +export declare const amino: {}; diff --git a/packages/codegen/dist/confio/proofs.d.ts b/packages/codegen/dist/confio/proofs.d.ts new file mode 100644 index 00000000..492007ac --- /dev/null +++ b/packages/codegen/dist/confio/proofs.d.ts @@ -0,0 +1,439 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../helpers"; +export declare enum HashOp { + /** NO_HASH - NO_HASH is the default if no data passed. Note this is an illegal argument some places. */ + NO_HASH = 0, + SHA256 = 1, + SHA512 = 2, + KECCAK = 3, + RIPEMD160 = 4, + /** BITCOIN - ripemd160(sha256(x)) */ + BITCOIN = 5, + UNRECOGNIZED = -1 +} +export declare const HashOpSDKType: typeof HashOp; +export declare function hashOpFromJSON(object: any): HashOp; +export declare function hashOpToJSON(object: HashOp): string; +/** + * LengthOp defines how to process the key and value of the LeafOp + * to include length information. After encoding the length with the given + * algorithm, the length will be prepended to the key and value bytes. + * (Each one with it's own encoded length) + */ +export declare enum LengthOp { + /** NO_PREFIX - NO_PREFIX don't include any length info */ + NO_PREFIX = 0, + /** VAR_PROTO - VAR_PROTO uses protobuf (and go-amino) varint encoding of the length */ + VAR_PROTO = 1, + /** VAR_RLP - VAR_RLP uses rlp int encoding of the length */ + VAR_RLP = 2, + /** FIXED32_BIG - FIXED32_BIG uses big-endian encoding of the length as a 32 bit integer */ + FIXED32_BIG = 3, + /** FIXED32_LITTLE - FIXED32_LITTLE uses little-endian encoding of the length as a 32 bit integer */ + FIXED32_LITTLE = 4, + /** FIXED64_BIG - FIXED64_BIG uses big-endian encoding of the length as a 64 bit integer */ + FIXED64_BIG = 5, + /** FIXED64_LITTLE - FIXED64_LITTLE uses little-endian encoding of the length as a 64 bit integer */ + FIXED64_LITTLE = 6, + /** REQUIRE_32_BYTES - REQUIRE_32_BYTES is like NONE, but will fail if the input is not exactly 32 bytes (sha256 output) */ + REQUIRE_32_BYTES = 7, + /** REQUIRE_64_BYTES - REQUIRE_64_BYTES is like NONE, but will fail if the input is not exactly 64 bytes (sha512 output) */ + REQUIRE_64_BYTES = 8, + UNRECOGNIZED = -1 +} +export declare const LengthOpSDKType: typeof LengthOp; +export declare function lengthOpFromJSON(object: any): LengthOp; +export declare function lengthOpToJSON(object: LengthOp): string; +/** + * ExistenceProof takes a key and a value and a set of steps to perform on it. + * The result of peforming all these steps will provide a "root hash", which can + * be compared to the value in a header. 
+ * + * Since it is computationally infeasible to produce a hash collission for any of the used + * cryptographic hash functions, if someone can provide a series of operations to transform + * a given key and value into a root hash that matches some trusted root, these key and values + * must be in the referenced merkle tree. + * + * The only possible issue is maliablity in LeafOp, such as providing extra prefix data, + * which should be controlled by a spec. Eg. with lengthOp as NONE, + * prefix = FOO, key = BAR, value = CHOICE + * and + * prefix = F, key = OOBAR, value = CHOICE + * would produce the same value. + * + * With LengthOp this is tricker but not impossible. Which is why the "leafPrefixEqual" field + * in the ProofSpec is valuable to prevent this mutability. And why all trees should + * length-prefix the data before hashing it. + */ +export interface ExistenceProof { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOp; + path: InnerOp[]; +} +/** + * ExistenceProof takes a key and a value and a set of steps to perform on it. + * The result of peforming all these steps will provide a "root hash", which can + * be compared to the value in a header. + * + * Since it is computationally infeasible to produce a hash collission for any of the used + * cryptographic hash functions, if someone can provide a series of operations to transform + * a given key and value into a root hash that matches some trusted root, these key and values + * must be in the referenced merkle tree. + * + * The only possible issue is maliablity in LeafOp, such as providing extra prefix data, + * which should be controlled by a spec. Eg. with lengthOp as NONE, + * prefix = FOO, key = BAR, value = CHOICE + * and + * prefix = F, key = OOBAR, value = CHOICE + * would produce the same value. + * + * With LengthOp this is tricker but not impossible. Which is why the "leafPrefixEqual" field + * in the ProofSpec is valuable to prevent this mutability. And why all trees should + * length-prefix the data before hashing it. + */ +export interface ExistenceProofSDKType { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOpSDKType; + path: InnerOpSDKType[]; +} +/** + * NonExistenceProof takes a proof of two neighbors, one left of the desired key, + * one right of the desired key. If both proofs are valid AND they are neighbors, + * then there is no valid proof for the given key. + */ +export interface NonExistenceProof { + /** TODO: remove this as unnecessary??? we prove a range */ + key: Uint8Array; + left?: ExistenceProof; + right?: ExistenceProof; +} +/** + * NonExistenceProof takes a proof of two neighbors, one left of the desired key, + * one right of the desired key. If both proofs are valid AND they are neighbors, + * then there is no valid proof for the given key. 
+ */ +export interface NonExistenceProofSDKType { + key: Uint8Array; + left?: ExistenceProofSDKType; + right?: ExistenceProofSDKType; +} +/** CommitmentProof is either an ExistenceProof or a NonExistenceProof, or a Batch of such messages */ +export interface CommitmentProof { + exist?: ExistenceProof; + nonexist?: NonExistenceProof; + batch?: BatchProof; + compressed?: CompressedBatchProof; +} +/** CommitmentProof is either an ExistenceProof or a NonExistenceProof, or a Batch of such messages */ +export interface CommitmentProofSDKType { + exist?: ExistenceProofSDKType; + nonexist?: NonExistenceProofSDKType; + batch?: BatchProofSDKType; + compressed?: CompressedBatchProofSDKType; +} +/** + * LeafOp represents the raw key-value data we wish to prove, and + * must be flexible to represent the internal transformation from + * the original key-value pairs into the basis hash, for many existing + * merkle trees. + * + * key and value are passed in. So that the signature of this operation is: + * leafOp(key, value) -> output + * + * To process this, first prehash the keys and values if needed (ANY means no hash in this case): + * hkey = prehashKey(key) + * hvalue = prehashValue(value) + * + * Then combine the bytes, and hash it + * output = hash(prefix || length(hkey) || hkey || length(hvalue) || hvalue) + */ +export interface LeafOp { + hash: HashOp; + prehashKey: HashOp; + prehashValue: HashOp; + length: LengthOp; + /** + * prefix is a fixed bytes that may optionally be included at the beginning to differentiate + * a leaf node from an inner node. + */ + prefix: Uint8Array; +} +/** + * LeafOp represents the raw key-value data we wish to prove, and + * must be flexible to represent the internal transformation from + * the original key-value pairs into the basis hash, for many existing + * merkle trees. + * + * key and value are passed in. So that the signature of this operation is: + * leafOp(key, value) -> output + * + * To process this, first prehash the keys and values if needed (ANY means no hash in this case): + * hkey = prehashKey(key) + * hvalue = prehashValue(value) + * + * Then combine the bytes, and hash it + * output = hash(prefix || length(hkey) || hkey || length(hvalue) || hvalue) + */ +export interface LeafOpSDKType { + hash: HashOp; + prehash_key: HashOp; + prehash_value: HashOp; + length: LengthOp; + prefix: Uint8Array; +} +/** + * InnerOp represents a merkle-proof step that is not a leaf. + * It represents concatenating two children and hashing them to provide the next result. + * + * The result of the previous step is passed in, so the signature of this op is: + * innerOp(child) -> output + * + * The result of applying InnerOp should be: + * output = op.hash(op.prefix || child || op.suffix) + * + * where the || operator is concatenation of binary data, + * and child is the result of hashing all the tree below this step. + * + * Any special data, like prepending child with the length, or prepending the entire operation with + * some value to differentiate from leaf nodes, should be included in prefix and suffix. + * If either of prefix or suffix is empty, we just treat it as an empty string + */ +export interface InnerOp { + hash: HashOp; + prefix: Uint8Array; + suffix: Uint8Array; +} +/** + * InnerOp represents a merkle-proof step that is not a leaf. + * It represents concatenating two children and hashing them to provide the next result. 
+ * + * The result of the previous step is passed in, so the signature of this op is: + * innerOp(child) -> output + * + * The result of applying InnerOp should be: + * output = op.hash(op.prefix || child || op.suffix) + * + * where the || operator is concatenation of binary data, + * and child is the result of hashing all the tree below this step. + * + * Any special data, like prepending child with the length, or prepending the entire operation with + * some value to differentiate from leaf nodes, should be included in prefix and suffix. + * If either of prefix or suffix is empty, we just treat it as an empty string + */ +export interface InnerOpSDKType { + hash: HashOp; + prefix: Uint8Array; + suffix: Uint8Array; +} +/** + * ProofSpec defines what the expected parameters are for a given proof type. + * This can be stored in the client and used to validate any incoming proofs. + * + * verify(ProofSpec, Proof) -> Proof | Error + * + * As demonstrated in tests, if we don't fix the algorithm used to calculate the + * LeafHash for a given tree, there are many possible key-value pairs that can + * generate a given hash (by interpretting the preimage differently). + * We need this for proper security, requires client knows a priori what + * tree format server uses. But not in code, rather a configuration object. + */ +export interface ProofSpec { + /** + * any field in the ExistenceProof must be the same as in this spec. + * except Prefix, which is just the first bytes of prefix (spec can be longer) + */ + leafSpec?: LeafOp; + innerSpec?: InnerSpec; + /** max_depth (if > 0) is the maximum number of InnerOps allowed (mainly for fixed-depth tries) */ + maxDepth: number; + /** min_depth (if > 0) is the minimum number of InnerOps allowed (mainly for fixed-depth tries) */ + minDepth: number; +} +/** + * ProofSpec defines what the expected parameters are for a given proof type. + * This can be stored in the client and used to validate any incoming proofs. + * + * verify(ProofSpec, Proof) -> Proof | Error + * + * As demonstrated in tests, if we don't fix the algorithm used to calculate the + * LeafHash for a given tree, there are many possible key-value pairs that can + * generate a given hash (by interpretting the preimage differently). + * We need this for proper security, requires client knows a priori what + * tree format server uses. But not in code, rather a configuration object. + */ +export interface ProofSpecSDKType { + leaf_spec?: LeafOpSDKType; + inner_spec?: InnerSpecSDKType; + max_depth: number; + min_depth: number; +} +/** + * InnerSpec contains all store-specific structure info to determine if two proofs from a + * given store are neighbors. + * + * This enables: + * + * isLeftMost(spec: InnerSpec, op: InnerOp) + * isRightMost(spec: InnerSpec, op: InnerOp) + * isLeftNeighbor(spec: InnerSpec, left: InnerOp, right: InnerOp) + */ +export interface InnerSpec { + /** + * Child order is the ordering of the children node, must count from 0 + * iavl tree is [0, 1] (left then right) + * merk is [0, 2, 1] (left, right, here) + */ + childOrder: number[]; + childSize: number; + minPrefixLength: number; + maxPrefixLength: number; + /** empty child is the prehash image that is used when one child is nil (eg. 20 bytes of 0) */ + emptyChild: Uint8Array; + /** hash is the algorithm that must be used for each InnerOp */ + hash: HashOp; +} +/** + * InnerSpec contains all store-specific structure info to determine if two proofs from a + * given store are neighbors. 
+ * + * This enables: + * + * isLeftMost(spec: InnerSpec, op: InnerOp) + * isRightMost(spec: InnerSpec, op: InnerOp) + * isLeftNeighbor(spec: InnerSpec, left: InnerOp, right: InnerOp) + */ +export interface InnerSpecSDKType { + child_order: number[]; + child_size: number; + min_prefix_length: number; + max_prefix_length: number; + empty_child: Uint8Array; + hash: HashOp; +} +/** BatchProof is a group of multiple proof types than can be compressed */ +export interface BatchProof { + entries: BatchEntry[]; +} +/** BatchProof is a group of multiple proof types than can be compressed */ +export interface BatchProofSDKType { + entries: BatchEntrySDKType[]; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ +export interface BatchEntry { + exist?: ExistenceProof; + nonexist?: NonExistenceProof; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ +export interface BatchEntrySDKType { + exist?: ExistenceProofSDKType; + nonexist?: NonExistenceProofSDKType; +} +export interface CompressedBatchProof { + entries: CompressedBatchEntry[]; + lookupInners: InnerOp[]; +} +export interface CompressedBatchProofSDKType { + entries: CompressedBatchEntrySDKType[]; + lookup_inners: InnerOpSDKType[]; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ +export interface CompressedBatchEntry { + exist?: CompressedExistenceProof; + nonexist?: CompressedNonExistenceProof; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ +export interface CompressedBatchEntrySDKType { + exist?: CompressedExistenceProofSDKType; + nonexist?: CompressedNonExistenceProofSDKType; +} +export interface CompressedExistenceProof { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOp; + /** these are indexes into the lookup_inners table in CompressedBatchProof */ + path: number[]; +} +export interface CompressedExistenceProofSDKType { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOpSDKType; + path: number[]; +} +export interface CompressedNonExistenceProof { + /** TODO: remove this as unnecessary??? 
we prove a range */ + key: Uint8Array; + left?: CompressedExistenceProof; + right?: CompressedExistenceProof; +} +export interface CompressedNonExistenceProofSDKType { + key: Uint8Array; + left?: CompressedExistenceProofSDKType; + right?: CompressedExistenceProofSDKType; +} +export declare const ExistenceProof: { + encode(message: ExistenceProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ExistenceProof; + fromPartial(object: DeepPartial): ExistenceProof; +}; +export declare const NonExistenceProof: { + encode(message: NonExistenceProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NonExistenceProof; + fromPartial(object: DeepPartial): NonExistenceProof; +}; +export declare const CommitmentProof: { + encode(message: CommitmentProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommitmentProof; + fromPartial(object: DeepPartial): CommitmentProof; +}; +export declare const LeafOp: { + encode(message: LeafOp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LeafOp; + fromPartial(object: DeepPartial): LeafOp; +}; +export declare const InnerOp: { + encode(message: InnerOp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InnerOp; + fromPartial(object: DeepPartial): InnerOp; +}; +export declare const ProofSpec: { + encode(message: ProofSpec, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ProofSpec; + fromPartial(object: DeepPartial): ProofSpec; +}; +export declare const InnerSpec: { + encode(message: InnerSpec, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InnerSpec; + fromPartial(object: DeepPartial): InnerSpec; +}; +export declare const BatchProof: { + encode(message: BatchProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BatchProof; + fromPartial(object: DeepPartial): BatchProof; +}; +export declare const BatchEntry: { + encode(message: BatchEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BatchEntry; + fromPartial(object: DeepPartial): BatchEntry; +}; +export declare const CompressedBatchProof: { + encode(message: CompressedBatchProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedBatchProof; + fromPartial(object: DeepPartial): CompressedBatchProof; +}; +export declare const CompressedBatchEntry: { + encode(message: CompressedBatchEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedBatchEntry; + fromPartial(object: DeepPartial): CompressedBatchEntry; +}; +export declare const CompressedExistenceProof: { + encode(message: CompressedExistenceProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedExistenceProof; + fromPartial(object: DeepPartial): CompressedExistenceProof; +}; +export declare const CompressedNonExistenceProof: { + encode(message: CompressedNonExistenceProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedNonExistenceProof; + fromPartial(object: DeepPartial): CompressedNonExistenceProof; +}; diff --git a/packages/codegen/dist/cosmos/app/v1alpha1/config.d.ts b/packages/codegen/dist/cosmos/app/v1alpha1/config.d.ts new file mode 100644 
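Each generated message in the file above exposes the same encode / decode / fromPartial triple. The following is a minimal usage sketch for the ics23 proof codecs; the relative import path and the example bytes are assumptions, not part of the generated output.

```ts
// Minimal sketch of using the generated ics23 proof codecs declared above.
// The import path is an assumption; adjust it to wherever this dist output is consumed from.
import { ExistenceProof } from "./confio/proofs";

// fromPartial accepts a DeepPartial object and fills in protobuf defaults for omitted fields.
const proof = ExistenceProof.fromPartial({
  key: new Uint8Array([0x01]),
  value: new Uint8Array([0x02]),
  path: [],
});

// encode returns a protobufjs Writer; finish() produces the wire-format bytes.
const bytes = ExistenceProof.encode(proof).finish();

// decode round-trips the bytes back into a typed ExistenceProof.
const roundTripped = ExistenceProof.decode(bytes);
console.log(roundTripped.key, roundTripped.value);
```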
index 00000000..c5277666 --- /dev/null +++ b/packages/codegen/dist/cosmos/app/v1alpha1/config.d.ts @@ -0,0 +1,64 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * Config represents the configuration for a Cosmos SDK ABCI app. + * It is intended that all state machine logic including the version of + * baseapp and tx handlers (and possibly even Tendermint) that an app needs + * can be described in a config object. For compatibility, the framework should + * allow a mixture of declarative and imperative app wiring, however, apps + * that strive for the maximum ease of maintainability should be able to describe + * their state machine with a config object alone. + */ +export interface Config { + /** modules are the module configurations for the app. */ + modules: ModuleConfig[]; +} +/** + * Config represents the configuration for a Cosmos SDK ABCI app. + * It is intended that all state machine logic including the version of + * baseapp and tx handlers (and possibly even Tendermint) that an app needs + * can be described in a config object. For compatibility, the framework should + * allow a mixture of declarative and imperative app wiring, however, apps + * that strive for the maximum ease of maintainability should be able to describe + * their state machine with a config object alone. + */ +export interface ConfigSDKType { + modules: ModuleConfigSDKType[]; +} +/** ModuleConfig is a module configuration for an app. */ +export interface ModuleConfig { + /** + * name is the unique name of the module within the app. It should be a name + * that persists between different versions of a module so that modules + * can be smoothly upgraded to new versions. + * + * For example, for the module cosmos.bank.module.v1.Module, we may chose + * to simply name the module "bank" in the app. When we upgrade to + * cosmos.bank.module.v2.Module, the app-specific name "bank" stays the same + * and the framework knows that the v2 module should receive all the same state + * that the v1 module had. Note: modules should provide info on which versions + * they can migrate from in the ModuleDescriptor.can_migration_from field. + */ + name: string; + /** + * config is the config object for the module. Module config messages should + * define a ModuleDescriptor using the cosmos.app.v1alpha1.is_module extension. + */ + config?: Any; +} +/** ModuleConfig is a module configuration for an app. */ +export interface ModuleConfigSDKType { + name: string; + config?: AnySDKType; +} +export declare const Config: { + encode(message: Config, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Config; + fromPartial(object: DeepPartial): Config; +}; +export declare const ModuleConfig: { + encode(message: ModuleConfig, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleConfig; + fromPartial(object: DeepPartial): ModuleConfig; +}; diff --git a/packages/codegen/dist/cosmos/app/v1alpha1/module.d.ts b/packages/codegen/dist/cosmos/app/v1alpha1/module.d.ts new file mode 100644 index 00000000..0aea08c5 --- /dev/null +++ b/packages/codegen/dist/cosmos/app/v1alpha1/module.d.ts @@ -0,0 +1,117 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** ModuleDescriptor describes an app module. 
*/ +export interface ModuleDescriptor { + /** + * go_import names the package that should be imported by an app to load the + * module in the runtime module registry. Either go_import must be defined here + * or the go_package option must be defined at the file level to indicate + * to users where to location the module implementation. go_import takes + * precedence over go_package when both are defined. + */ + goImport: string; + /** + * use_package refers to a protobuf package that this module + * uses and exposes to the world. In an app, only one module should "use" + * or own a single protobuf package. It is assumed that the module uses + * all of the .proto files in a single package. + */ + usePackage: PackageReference[]; + /** + * can_migrate_from defines which module versions this module can migrate + * state from. The framework will check that one module version is able to + * migrate from a previous module version before attempting to update its + * config. It is assumed that modules can transitively migrate from earlier + * versions. For instance if v3 declares it can migrate from v2, and v2 + * declares it can migrate from v1, the framework knows how to migrate + * from v1 to v3, assuming all 3 module versions are registered at runtime. + */ + canMigrateFrom: MigrateFromInfo[]; +} +/** ModuleDescriptor describes an app module. */ +export interface ModuleDescriptorSDKType { + go_import: string; + use_package: PackageReferenceSDKType[]; + can_migrate_from: MigrateFromInfoSDKType[]; +} +/** PackageReference is a reference to a protobuf package used by a module. */ +export interface PackageReference { + /** name is the fully-qualified name of the package. */ + name: string; + /** + * revision is the optional revision of the package that is being used. + * Protobuf packages used in Cosmos should generally have a major version + * as the last part of the package name, ex. foo.bar.baz.v1. + * The revision of a package can be thought of as the minor version of a + * package which has additional backwards compatible definitions that weren't + * present in a previous version. + * + * A package should indicate its revision with a source code comment + * above the package declaration in one of its fields containing the + * test "Revision N" where N is an integer revision. All packages start + * at revision 0 the first time they are released in a module. + * + * When a new version of a module is released and items are added to existing + * .proto files, these definitions should contain comments of the form + * "Since Revision N" where N is an integer revision. + * + * When the module runtime starts up, it will check the pinned proto + * image and panic if there are runtime protobuf definitions that are not + * in the pinned descriptor which do not have + * a "Since Revision N" comment or have a "Since Revision N" comment where + * N is <= to the revision specified here. This indicates that the protobuf + * files have been updated, but the pinned file descriptor hasn't. + * + * If there are items in the pinned file descriptor with a revision + * greater than the value indicated here, this will also cause a panic + * as it may mean that the pinned descriptor for a legacy module has been + * improperly updated or that there is some other versioning discrepancy. + * Runtime protobuf definitions will also be checked for compatibility + * with pinned file descriptors to make sure there are no incompatible changes. 
+ * + * This behavior ensures that: + * * pinned proto images are up-to-date + * * protobuf files are carefully annotated with revision comments which + * are important good client UX + * * protobuf files are changed in backwards and forwards compatible ways + */ + revision: number; +} +/** PackageReference is a reference to a protobuf package used by a module. */ +export interface PackageReferenceSDKType { + name: string; + revision: number; +} +/** + * MigrateFromInfo is information on a module version that a newer module + * can migrate from. + */ +export interface MigrateFromInfo { + /** + * module is the fully-qualified protobuf name of the module config object + * for the previous module version, ex: "cosmos.group.module.v1.Module". + */ + module: string; +} +/** + * MigrateFromInfo is information on a module version that a newer module + * can migrate from. + */ +export interface MigrateFromInfoSDKType { + module: string; +} +export declare const ModuleDescriptor: { + encode(message: ModuleDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleDescriptor; + fromPartial(object: DeepPartial): ModuleDescriptor; +}; +export declare const PackageReference: { + encode(message: PackageReference, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PackageReference; + fromPartial(object: DeepPartial): PackageReference; +}; +export declare const MigrateFromInfo: { + encode(message: MigrateFromInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MigrateFromInfo; + fromPartial(object: DeepPartial): MigrateFromInfo; +}; diff --git a/packages/codegen/dist/cosmos/app/v1alpha1/query.d.ts b/packages/codegen/dist/cosmos/app/v1alpha1/query.d.ts new file mode 100644 index 00000000..43d29bc1 --- /dev/null +++ b/packages/codegen/dist/cosmos/app/v1alpha1/query.d.ts @@ -0,0 +1,28 @@ +import { Config, ConfigSDKType } from "./config"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryConfigRequest is the Query/Config request type. */ +export interface QueryConfigRequest { +} +/** QueryConfigRequest is the Query/Config request type. */ +export interface QueryConfigRequestSDKType { +} +/** QueryConfigRequest is the Query/Config response type. */ +export interface QueryConfigResponse { + /** config is the current app config. */ + config?: Config; +} +/** QueryConfigRequest is the Query/Config response type. 
*/ +export interface QueryConfigResponseSDKType { + config?: ConfigSDKType; +} +export declare const QueryConfigRequest: { + encode(_: QueryConfigRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConfigRequest; + fromPartial(_: DeepPartial<QueryConfigRequest>): QueryConfigRequest; +}; +export declare const QueryConfigResponse: { + encode(message: QueryConfigResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConfigResponse; + fromPartial(object: DeepPartial<QueryConfigResponse>): QueryConfigResponse; +}; diff --git a/packages/codegen/dist/cosmos/app/v1alpha1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/app/v1alpha1/query.rpc.Query.d.ts new file mode 100644 index 00000000..5022d334 --- /dev/null +++ b/packages/codegen/dist/cosmos/app/v1alpha1/query.rpc.Query.d.ts @@ -0,0 +1,16 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryConfigRequest, QueryConfigResponse } from "./query"; +/** Query is the app module query service. */ +export interface Query { + /** Config returns the current app config. */ + config(request?: QueryConfigRequest): Promise<QueryConfigResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + config(request?: QueryConfigRequest): Promise<QueryConfigResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + config(request?: QueryConfigRequest): Promise<QueryConfigResponse>; +}; diff --git a/packages/codegen/dist/cosmos/auth/v1beta1/auth.d.ts b/packages/codegen/dist/cosmos/auth/v1beta1/auth.d.ts new file mode 100644 index 00000000..b9357161 --- /dev/null +++ b/packages/codegen/dist/cosmos/auth/v1beta1/auth.d.ts @@ -0,0 +1,69 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ +export interface BaseAccount { + address: string; + pubKey?: Any; + accountNumber: Long; + sequence: Long; +} +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ +export interface BaseAccountSDKType { + address: string; + pub_key?: AnySDKType; + account_number: Long; + sequence: Long; +} +/** ModuleAccount defines an account for modules that holds coins on a pool. */ +export interface ModuleAccount { + baseAccount?: BaseAccount; + name: string; + permissions: string[]; +} +/** ModuleAccount defines an account for modules that holds coins on a pool. */ +export interface ModuleAccountSDKType { + base_account?: BaseAccountSDKType; + name: string; + permissions: string[]; +} +/** Params defines the parameters for the auth module. */ +export interface Params { + maxMemoCharacters: Long; + txSigLimit: Long; + txSizeCostPerByte: Long; + sigVerifyCostEd25519: Long; + sigVerifyCostSecp256k1: Long; +} +/** Params defines the parameters for the auth module.
*/ +export interface ParamsSDKType { + max_memo_characters: Long; + tx_sig_limit: Long; + tx_size_cost_per_byte: Long; + sig_verify_cost_ed25519: Long; + sig_verify_cost_secp256k1: Long; +} +export declare const BaseAccount: { + encode(message: BaseAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BaseAccount; + fromPartial(object: DeepPartial): BaseAccount; +}; +export declare const ModuleAccount: { + encode(message: ModuleAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleAccount; + fromPartial(object: DeepPartial): ModuleAccount; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; diff --git a/packages/codegen/dist/cosmos/auth/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/auth/v1beta1/genesis.d.ts new file mode 100644 index 00000000..570ed542 --- /dev/null +++ b/packages/codegen/dist/cosmos/auth/v1beta1/genesis.d.ts @@ -0,0 +1,21 @@ +import { Params, ParamsSDKType } from "./auth"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the auth module's genesis state. */ +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** accounts are the accounts present at genesis. */ + accounts: Any[]; +} +/** GenesisState defines the auth module's genesis state. */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + accounts: AnySDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/auth/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/auth/v1beta1/query.d.ts new file mode 100644 index 00000000..bfa4ddb4 --- /dev/null +++ b/packages/codegen/dist/cosmos/auth/v1beta1/query.d.ts @@ -0,0 +1,205 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Params, ParamsSDKType } from "./auth"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * QueryAccountsRequest is the request type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryAccountsRequest is the request type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryAccountsResponse is the response type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsResponse { + /** accounts are the existing accounts */ + accounts: Any[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryAccountsResponse is the response type for the Query/Accounts RPC method. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsResponseSDKType { + accounts: AnySDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryAccountRequest is the request type for the Query/Account RPC method. */ +export interface QueryAccountRequest { + /** address defines the address to query for. */ + address: string; +} +/** QueryAccountRequest is the request type for the Query/Account RPC method. */ +export interface QueryAccountRequestSDKType { + address: string; +} +/** QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. */ +export interface QueryModuleAccountsRequest { +} +/** QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. */ +export interface QueryModuleAccountsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryAccountResponse is the response type for the Query/Account RPC method. */ +export interface QueryAccountResponse { + /** account defines the account of the corresponding address. */ + account?: Any; +} +/** QueryAccountResponse is the response type for the Query/Account RPC method. */ +export interface QueryAccountResponseSDKType { + account?: AnySDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. */ +export interface QueryModuleAccountsResponse { + accounts: Any[]; +} +/** QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. 
*/ +export interface QueryModuleAccountsResponseSDKType { + accounts: AnySDKType[]; +} +/** Bech32PrefixRequest is the request type for Bech32Prefix rpc method */ +export interface Bech32PrefixRequest { +} +/** Bech32PrefixRequest is the request type for Bech32Prefix rpc method */ +export interface Bech32PrefixRequestSDKType { +} +/** Bech32PrefixResponse is the response type for Bech32Prefix rpc method */ +export interface Bech32PrefixResponse { + bech32Prefix: string; +} +/** Bech32PrefixResponse is the response type for Bech32Prefix rpc method */ +export interface Bech32PrefixResponseSDKType { + bech32_prefix: string; +} +/** AddressBytesToStringRequest is the request type for AddressString rpc method */ +export interface AddressBytesToStringRequest { + addressBytes: Uint8Array; +} +/** AddressBytesToStringRequest is the request type for AddressString rpc method */ +export interface AddressBytesToStringRequestSDKType { + address_bytes: Uint8Array; +} +/** AddressBytesToStringResponse is the response type for AddressString rpc method */ +export interface AddressBytesToStringResponse { + addressString: string; +} +/** AddressBytesToStringResponse is the response type for AddressString rpc method */ +export interface AddressBytesToStringResponseSDKType { + address_string: string; +} +/** AddressStringToBytesRequest is the request type for AccountBytes rpc method */ +export interface AddressStringToBytesRequest { + addressString: string; +} +/** AddressStringToBytesRequest is the request type for AccountBytes rpc method */ +export interface AddressStringToBytesRequestSDKType { + address_string: string; +} +/** AddressStringToBytesResponse is the response type for AddressBytes rpc method */ +export interface AddressStringToBytesResponse { + addressBytes: Uint8Array; +} +/** AddressStringToBytesResponse is the response type for AddressBytes rpc method */ +export interface AddressStringToBytesResponseSDKType { + address_bytes: Uint8Array; +} +export declare const QueryAccountsRequest: { + encode(message: QueryAccountsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsRequest; + fromPartial(object: DeepPartial): QueryAccountsRequest; +}; +export declare const QueryAccountsResponse: { + encode(message: QueryAccountsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsResponse; + fromPartial(object: DeepPartial): QueryAccountsResponse; +}; +export declare const QueryAccountRequest: { + encode(message: QueryAccountRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountRequest; + fromPartial(object: DeepPartial): QueryAccountRequest; +}; +export declare const QueryModuleAccountsRequest: { + encode(_: QueryModuleAccountsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsRequest; + fromPartial(_: DeepPartial): QueryModuleAccountsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryAccountResponse: { + encode(message: QueryAccountResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountResponse; + fromPartial(object: DeepPartial): QueryAccountResponse; 
+}; +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryModuleAccountsResponse: { + encode(message: QueryModuleAccountsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsResponse; + fromPartial(object: DeepPartial): QueryModuleAccountsResponse; +}; +export declare const Bech32PrefixRequest: { + encode(_: Bech32PrefixRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixRequest; + fromPartial(_: DeepPartial): Bech32PrefixRequest; +}; +export declare const Bech32PrefixResponse: { + encode(message: Bech32PrefixResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixResponse; + fromPartial(object: DeepPartial): Bech32PrefixResponse; +}; +export declare const AddressBytesToStringRequest: { + encode(message: AddressBytesToStringRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringRequest; + fromPartial(object: DeepPartial): AddressBytesToStringRequest; +}; +export declare const AddressBytesToStringResponse: { + encode(message: AddressBytesToStringResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringResponse; + fromPartial(object: DeepPartial): AddressBytesToStringResponse; +}; +export declare const AddressStringToBytesRequest: { + encode(message: AddressStringToBytesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesRequest; + fromPartial(object: DeepPartial): AddressStringToBytesRequest; +}; +export declare const AddressStringToBytesResponse: { + encode(message: AddressStringToBytesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesResponse; + fromPartial(object: DeepPartial): AddressStringToBytesResponse; +}; diff --git a/packages/codegen/dist/cosmos/auth/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/auth/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..810e56e8 --- /dev/null +++ b/packages/codegen/dist/cosmos/auth/v1beta1/query.lcd.d.ts @@ -0,0 +1,15 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryAccountsRequest, QueryAccountsResponseSDKType, QueryAccountRequest, QueryAccountResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryModuleAccountsRequest, QueryModuleAccountsResponseSDKType, Bech32PrefixRequest, Bech32PrefixResponseSDKType, AddressBytesToStringRequest, AddressBytesToStringResponseSDKType, AddressStringToBytesRequest, AddressStringToBytesResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + accounts(params?: QueryAccountsRequest): Promise; + account(params: QueryAccountRequest): Promise; + params(_params?: QueryParamsRequest): Promise; + moduleAccounts(_params?: QueryModuleAccountsRequest): Promise; + bech32Prefix(_params?: Bech32PrefixRequest): Promise; + addressBytesToString(params: AddressBytesToStringRequest): Promise; + addressStringToBytes(params: AddressStringToBytesRequest): Promise; +} diff --git 
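The LCD client declared just above is constructed from an @osmonauts/lcd LCDClient. A hedged sketch of a REST query follows, assuming LCDClient accepts a restEndpoint option in its constructor; the endpoint URL and import path are placeholders.

```ts
// Sketch of querying the auth module over REST with the generated LCDQueryClient above.
// Assumes @osmonauts/lcd's LCDClient takes a restEndpoint option; the URL is a placeholder.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./cosmos/auth/v1beta1/query.lcd";

async function fetchAccount(address: string) {
  const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
  const client = new LCDQueryClient({ requestClient });

  // Returns the SDKType (snake_case) shape, per the declarations above.
  const res = await client.account({ address });
  return res.account;
}
```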
a/packages/codegen/dist/cosmos/auth/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/auth/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..6e3b8d77 --- /dev/null +++ b/packages/codegen/dist/cosmos/auth/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,44 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryAccountsRequest, QueryAccountsResponse, QueryAccountRequest, QueryAccountResponse, QueryParamsRequest, QueryParamsResponse, QueryModuleAccountsRequest, QueryModuleAccountsResponse, Bech32PrefixRequest, Bech32PrefixResponse, AddressBytesToStringRequest, AddressBytesToStringResponse, AddressStringToBytesRequest, AddressStringToBytesResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** + * Accounts returns all the existing accounts + * + * Since: cosmos-sdk 0.43 + */ + accounts(request?: QueryAccountsRequest): Promise; + /** Account returns account details based on address. */ + account(request: QueryAccountRequest): Promise; + /** Params queries all parameters. */ + params(request?: QueryParamsRequest): Promise; + /** ModuleAccounts returns all the existing module accounts. */ + moduleAccounts(request?: QueryModuleAccountsRequest): Promise; + /** Bech32 queries bech32Prefix */ + bech32Prefix(request?: Bech32PrefixRequest): Promise; + /** AddressBytesToString converts Account Address bytes to string */ + addressBytesToString(request: AddressBytesToStringRequest): Promise; + /** AddressStringToBytes converts Address string to bytes */ + addressStringToBytes(request: AddressStringToBytesRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + accounts(request?: QueryAccountsRequest): Promise; + account(request: QueryAccountRequest): Promise; + params(request?: QueryParamsRequest): Promise; + moduleAccounts(request?: QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: Bech32PrefixRequest): Promise; + addressBytesToString(request: AddressBytesToStringRequest): Promise; + addressStringToBytes(request: AddressStringToBytesRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + accounts(request?: QueryAccountsRequest): Promise; + account(request: QueryAccountRequest): Promise; + params(request?: QueryParamsRequest): Promise; + moduleAccounts(request?: QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: Bech32PrefixRequest): Promise; + addressBytesToString(request: AddressBytesToStringRequest): Promise; + addressStringToBytes(request: AddressStringToBytesRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/authz.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/authz.d.ts new file mode 100644 index 00000000..532dd755 --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/authz.d.ts @@ -0,0 +1,88 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * GenericAuthorization gives the grantee unrestricted permissions to execute + * the provided method on behalf of the granter's account. + */ +export interface GenericAuthorization { + /** Msg, identified by it's type URL, to grant unrestricted permissions to execute */ + msg: string; +} +/** + * GenericAuthorization gives the grantee unrestricted permissions to execute + * the provided method on behalf of the granter's account. 
+ */ +export interface GenericAuthorizationSDKType { + msg: string; +} +/** + * Grant gives permissions to execute + * the provide method with expiration time. + */ +export interface Grant { + authorization?: Any; + /** + * time when the grant will expire and will be pruned. If null, then the grant + * doesn't have a time expiration (other conditions in `authorization` + * may apply to invalidate the grant) + */ + expiration?: Date; +} +/** + * Grant gives permissions to execute + * the provide method with expiration time. + */ +export interface GrantSDKType { + authorization?: AnySDKType; + expiration?: Date; +} +/** + * GrantAuthorization extends a grant with both the addresses of the grantee and granter. + * It is used in genesis.proto and query.proto + */ +export interface GrantAuthorization { + granter: string; + grantee: string; + authorization?: Any; + expiration?: Date; +} +/** + * GrantAuthorization extends a grant with both the addresses of the grantee and granter. + * It is used in genesis.proto and query.proto + */ +export interface GrantAuthorizationSDKType { + granter: string; + grantee: string; + authorization?: AnySDKType; + expiration?: Date; +} +/** GrantQueueItem contains the list of TypeURL of a sdk.Msg. */ +export interface GrantQueueItem { + /** msg_type_urls contains the list of TypeURL of a sdk.Msg. */ + msgTypeUrls: string[]; +} +/** GrantQueueItem contains the list of TypeURL of a sdk.Msg. */ +export interface GrantQueueItemSDKType { + msg_type_urls: string[]; +} +export declare const GenericAuthorization: { + encode(message: GenericAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenericAuthorization; + fromPartial(object: DeepPartial): GenericAuthorization; +}; +export declare const Grant: { + encode(message: Grant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Grant; + fromPartial(object: DeepPartial): Grant; +}; +export declare const GrantAuthorization: { + encode(message: GrantAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GrantAuthorization; + fromPartial(object: DeepPartial): GrantAuthorization; +}; +export declare const GrantQueueItem: { + encode(message: GrantQueueItem, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GrantQueueItem; + fromPartial(object: DeepPartial): GrantQueueItem; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/event.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/event.d.ts new file mode 100644 index 00000000..9bac8906 --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/event.d.ts @@ -0,0 +1,42 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** EventGrant is emitted on Msg/Grant */ +export interface EventGrant { + /** Msg type URL for which an autorization is granted */ + msgTypeUrl: string; + /** Granter account address */ + granter: string; + /** Grantee account address */ + grantee: string; +} +/** EventGrant is emitted on Msg/Grant */ +export interface EventGrantSDKType { + msg_type_url: string; + granter: string; + grantee: string; +} +/** EventRevoke is emitted on Msg/Revoke */ +export interface EventRevoke { + /** Msg type URL for which an autorization is revoked */ + msgTypeUrl: string; + /** Granter account address */ + granter: string; + /** Grantee account address */ + grantee: string; +} +/** EventRevoke is emitted on Msg/Revoke */ +export 
interface EventRevokeSDKType { + msg_type_url: string; + granter: string; + grantee: string; +} +export declare const EventGrant: { + encode(message: EventGrant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventGrant; + fromPartial(object: DeepPartial): EventGrant; +}; +export declare const EventRevoke: { + encode(message: EventRevoke, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventRevoke; + fromPartial(object: DeepPartial): EventRevoke; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/genesis.d.ts new file mode 100644 index 00000000..c17384ee --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/genesis.d.ts @@ -0,0 +1,16 @@ +import { GrantAuthorization, GrantAuthorizationSDKType } from "./authz"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the authz module's genesis state. */ +export interface GenesisState { + authorization: GrantAuthorization[]; +} +/** GenesisState defines the authz module's genesis state. */ +export interface GenesisStateSDKType { + authorization: GrantAuthorizationSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/query.d.ts new file mode 100644 index 00000000..8d93cef8 --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/query.d.ts @@ -0,0 +1,108 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Grant, GrantSDKType, GrantAuthorization, GrantAuthorizationSDKType } from "./authz"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryGrantsRequest is the request type for the Query/Grants RPC method. */ +export interface QueryGrantsRequest { + granter: string; + grantee: string; + /** Optional, msg_type_url, when set, will query only grants matching given msg type. */ + msgTypeUrl: string; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGrantsRequest is the request type for the Query/Grants RPC method. */ +export interface QueryGrantsRequestSDKType { + granter: string; + grantee: string; + msg_type_url: string; + pagination?: PageRequestSDKType; +} +/** QueryGrantsResponse is the response type for the Query/Authorizations RPC method. */ +export interface QueryGrantsResponse { + /** authorizations is a list of grants granted for grantee by granter. */ + grants: Grant[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** QueryGrantsResponse is the response type for the Query/Authorizations RPC method. */ +export interface QueryGrantsResponseSDKType { + grants: GrantSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGranterGrantsRequest is the request type for the Query/GranterGrants RPC method. */ +export interface QueryGranterGrantsRequest { + granter: string; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGranterGrantsRequest is the request type for the Query/GranterGrants RPC method. 
*/ +export interface QueryGranterGrantsRequestSDKType { + granter: string; + pagination?: PageRequestSDKType; +} +/** QueryGranterGrantsResponse is the response type for the Query/GranterGrants RPC method. */ +export interface QueryGranterGrantsResponse { + /** grants is a list of grants granted by the granter. */ + grants: GrantAuthorization[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** QueryGranterGrantsResponse is the response type for the Query/GranterGrants RPC method. */ +export interface QueryGranterGrantsResponseSDKType { + grants: GrantAuthorizationSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGranteeGrantsRequest is the request type for the Query/IssuedGrants RPC method. */ +export interface QueryGranteeGrantsRequest { + grantee: string; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGranteeGrantsRequest is the request type for the Query/IssuedGrants RPC method. */ +export interface QueryGranteeGrantsRequestSDKType { + grantee: string; + pagination?: PageRequestSDKType; +} +/** QueryGranteeGrantsResponse is the response type for the Query/GranteeGrants RPC method. */ +export interface QueryGranteeGrantsResponse { + /** grants is a list of grants granted to the grantee. */ + grants: GrantAuthorization[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** QueryGranteeGrantsResponse is the response type for the Query/GranteeGrants RPC method. */ +export interface QueryGranteeGrantsResponseSDKType { + grants: GrantAuthorizationSDKType[]; + pagination?: PageResponseSDKType; +} +export declare const QueryGrantsRequest: { + encode(message: QueryGrantsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGrantsRequest; + fromPartial(object: DeepPartial): QueryGrantsRequest; +}; +export declare const QueryGrantsResponse: { + encode(message: QueryGrantsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGrantsResponse; + fromPartial(object: DeepPartial): QueryGrantsResponse; +}; +export declare const QueryGranterGrantsRequest: { + encode(message: QueryGranterGrantsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranterGrantsRequest; + fromPartial(object: DeepPartial): QueryGranterGrantsRequest; +}; +export declare const QueryGranterGrantsResponse: { + encode(message: QueryGranterGrantsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranterGrantsResponse; + fromPartial(object: DeepPartial): QueryGranterGrantsResponse; +}; +export declare const QueryGranteeGrantsRequest: { + encode(message: QueryGranteeGrantsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranteeGrantsRequest; + fromPartial(object: DeepPartial): QueryGranteeGrantsRequest; +}; +export declare const QueryGranteeGrantsResponse: { + encode(message: QueryGranteeGrantsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranteeGrantsResponse; + fromPartial(object: DeepPartial): QueryGranteeGrantsResponse; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..7e4f0a08 --- /dev/null +++ 
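The *.rpc.Query clients in this diff (for example the auth QueryClientImpl earlier) all accept an Rpc transport. One plausible wiring through @cosmjs/stargate is sketched below; the RPC endpoint and import path are placeholders, and this is not the only way to obtain an Rpc implementation.

```ts
// Sketch of wiring a generated *.rpc.Query client (e.g. the auth QueryClientImpl shown earlier)
// to a chain via @cosmjs/stargate. The RPC URL is a placeholder.
import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClientImpl } from "./cosmos/auth/v1beta1/query.rpc.Query";

async function queryAuthParams() {
  const tmClient = await Tendermint34Client.connect("https://rpc.example.com");
  const queryClient = new QueryClient(tmClient);

  // createProtobufRpcClient yields an object with request(service, method, data),
  // which matches the Rpc interface the generated clients expect.
  const rpc = createProtobufRpcClient(queryClient);

  const auth = new QueryClientImpl(rpc);
  const { params } = await auth.params();
  return params;
}
```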
b/packages/codegen/dist/cosmos/authz/v1beta1/query.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryGrantsRequest, QueryGrantsResponseSDKType, QueryGranterGrantsRequest, QueryGranterGrantsResponseSDKType, QueryGranteeGrantsRequest, QueryGranteeGrantsResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + grants(params: QueryGrantsRequest): Promise; + granterGrants(params: QueryGranterGrantsRequest): Promise; + granteeGrants(params: QueryGranteeGrantsRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..d7f1381d --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,32 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryGrantsRequest, QueryGrantsResponse, QueryGranterGrantsRequest, QueryGranterGrantsResponse, QueryGranteeGrantsRequest, QueryGranteeGrantsResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** Returns list of `Authorization`, granted to the grantee by the granter. */ + grants(request: QueryGrantsRequest): Promise; + /** + * GranterGrants returns list of `GrantAuthorization`, granted by granter. + * + * Since: cosmos-sdk 0.46 + */ + granterGrants(request: QueryGranterGrantsRequest): Promise; + /** + * GranteeGrants returns a list of `GrantAuthorization` by grantee. + * + * Since: cosmos-sdk 0.46 + */ + granteeGrants(request: QueryGranteeGrantsRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + grants(request: QueryGrantsRequest): Promise; + granterGrants(request: QueryGranterGrantsRequest): Promise; + granteeGrants(request: QueryGranteeGrantsRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + grants(request: QueryGrantsRequest): Promise; + granterGrants(request: QueryGranterGrantsRequest): Promise; + granteeGrants(request: QueryGranteeGrantsRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/tx.d.ts new file mode 100644 index 00000000..a5fc1f48 --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/tx.d.ts @@ -0,0 +1,113 @@ +import { Grant, GrantSDKType } from "./authz"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgGrant is a request type for Grant method. It declares authorization to the grantee + * on behalf of the granter with the provided expiration time. + */ +export interface MsgGrant { + granter: string; + grantee: string; + grant?: Grant; +} +/** + * MsgGrant is a request type for Grant method. It declares authorization to the grantee + * on behalf of the granter with the provided expiration time. + */ +export interface MsgGrantSDKType { + granter: string; + grantee: string; + grant?: GrantSDKType; +} +/** MsgExecResponse defines the Msg/MsgExecResponse response type. */ +export interface MsgExecResponse { + results: Uint8Array[]; +} +/** MsgExecResponse defines the Msg/MsgExecResponse response type. 
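The generated LCD client above accepts any `LCDClient` from `@osmonauts/lcd` and exposes the same request shapes as the RPC querier, returning the snake_case `...SDKType` responses. A minimal usage sketch, assuming the built output is importable from the `dist` paths shown in this diff and that `LCDClient` takes a `restEndpoint` option (the endpoint URL and bech32 addresses are placeholders):

```ts
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./dist/cosmos/authz/v1beta1/query.lcd";

async function listGrants() {
  // Assumed constructor shape; the endpoint is a placeholder.
  const requestClient = new LCDClient({ restEndpoint: "https://lcd.example.com" });
  const client = new LCDQueryClient({ requestClient });

  // msgTypeUrl narrows the result to a single Msg type; pass "" to list every grant.
  const res = await client.grants({
    granter: "cosmos1granteraddress...",
    grantee: "cosmos1granteeaddress...",
    msgTypeUrl: "/cosmos.bank.v1beta1.MsgSend",
    pagination: undefined
  });

  return res.grants; // GrantSDKType[] (snake_case fields)
}
```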
*/ +export interface MsgExecResponseSDKType { + results: Uint8Array[]; +} +/** + * MsgExec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ +export interface MsgExec { + grantee: string; + /** + * Authorization Msg requests to execute. Each msg must implement Authorization interface + * The x/authz will try to find a grant matching (msg.signers[0], grantee, MsgTypeURL(msg)) + * triple and validate it. + */ + msgs: Any[]; +} +/** + * MsgExec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ +export interface MsgExecSDKType { + grantee: string; + msgs: AnySDKType[]; +} +/** MsgGrantResponse defines the Msg/MsgGrant response type. */ +export interface MsgGrantResponse { +} +/** MsgGrantResponse defines the Msg/MsgGrant response type. */ +export interface MsgGrantResponseSDKType { +} +/** + * MsgRevoke revokes any authorization with the provided sdk.Msg type on the + * granter's account with that has been granted to the grantee. + */ +export interface MsgRevoke { + granter: string; + grantee: string; + msgTypeUrl: string; +} +/** + * MsgRevoke revokes any authorization with the provided sdk.Msg type on the + * granter's account with that has been granted to the grantee. + */ +export interface MsgRevokeSDKType { + granter: string; + grantee: string; + msg_type_url: string; +} +/** MsgRevokeResponse defines the Msg/MsgRevokeResponse response type. */ +export interface MsgRevokeResponse { +} +/** MsgRevokeResponse defines the Msg/MsgRevokeResponse response type. */ +export interface MsgRevokeResponseSDKType { +} +export declare const MsgGrant: { + encode(message: MsgGrant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrant; + fromPartial(object: DeepPartial): MsgGrant; +}; +export declare const MsgExecResponse: { + encode(message: MsgExecResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecResponse; + fromPartial(object: DeepPartial): MsgExecResponse; +}; +export declare const MsgExec: { + encode(message: MsgExec, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExec; + fromPartial(object: DeepPartial): MsgExec; +}; +export declare const MsgGrantResponse: { + encode(_: MsgGrantResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantResponse; + fromPartial(_: DeepPartial): MsgGrantResponse; +}; +export declare const MsgRevoke: { + encode(message: MsgRevoke, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevoke; + fromPartial(object: DeepPartial): MsgRevoke; +}; +export declare const MsgRevokeResponse: { + encode(_: MsgRevokeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeResponse; + fromPartial(_: DeepPartial): MsgRevokeResponse; +}; diff --git a/packages/codegen/dist/cosmos/authz/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/authz/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..3bcce35a --- /dev/null +++ b/packages/codegen/dist/cosmos/authz/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,30 @@ +import { Rpc } from "../../../helpers"; +import { MsgGrant, MsgGrantResponse, MsgExec, 
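As the `MsgExec` comment above spells out, the messages to execute travel as `google.protobuf.Any` values whose single signer must be the granter. A sketch of packing a bank `MsgSend` into a `MsgExec` with the generated codecs (addresses and amounts are placeholders; import paths assume the dist layout in this diff):

```ts
import { MsgExec } from "./dist/cosmos/authz/v1beta1/tx";
import { MsgSend } from "./dist/cosmos/bank/v1beta1/tx";

// The inner message is executed by the grantee on behalf of the granter,
// so its only signer (fromAddress) must be the granter's address.
const inner = MsgSend.fromPartial({
  fromAddress: "cosmos1granteraddress...",
  toAddress: "cosmos1recipientaddress...",
  amount: [{ denom: "uatom", amount: "1000" }]
});

const exec = MsgExec.fromPartial({
  grantee: "cosmos1granteeaddress...",
  msgs: [{
    typeUrl: "/cosmos.bank.v1beta1.MsgSend",
    value: MsgSend.encode(inner).finish()
  }]
});

const execBytes = MsgExec.encode(exec).finish(); // raw protobuf bytes for the MsgExec
```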
MsgExecResponse, MsgRevoke, MsgRevokeResponse } from "./tx"; +/** Msg defines the authz Msg service. */ +export interface Msg { + /** + * Grant grants the provided authorization to the grantee on the granter's + * account with the provided expiration time. If there is already a grant + * for the given (granter, grantee, Authorization) triple, then the grant + * will be overwritten. + */ + grant(request: MsgGrant): Promise; + /** + * Exec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ + exec(request: MsgExec): Promise; + /** + * Revoke revokes any authorization corresponding to the provided method name on the + * granter's account that has been granted to the grantee. + */ + revoke(request: MsgRevoke): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + grant(request: MsgGrant): Promise; + exec(request: MsgExec): Promise; + revoke(request: MsgRevoke): Promise; +} diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/authz.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/authz.d.ts new file mode 100644 index 00000000..34bfd718 --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/authz.d.ts @@ -0,0 +1,26 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * SendAuthorization allows the grantee to spend up to spend_limit coins from + * the granter's account. + * + * Since: cosmos-sdk 0.43 + */ +export interface SendAuthorization { + spendLimit: Coin[]; +} +/** + * SendAuthorization allows the grantee to spend up to spend_limit coins from + * the granter's account. + * + * Since: cosmos-sdk 0.43 + */ +export interface SendAuthorizationSDKType { + spend_limit: CoinSDKType[]; +} +export declare const SendAuthorization: { + encode(message: SendAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SendAuthorization; + fromPartial(object: DeepPartial): SendAuthorization; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/bank.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/bank.d.ts new file mode 100644 index 00000000..47e94746 --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/bank.d.ts @@ -0,0 +1,185 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** Params defines the parameters for the bank module. */ +export interface Params { + sendEnabled: SendEnabled[]; + defaultSendEnabled: boolean; +} +/** Params defines the parameters for the bank module. */ +export interface ParamsSDKType { + send_enabled: SendEnabledSDKType[]; + default_send_enabled: boolean; +} +/** + * SendEnabled maps coin denom to a send_enabled status (whether a denom is + * sendable). + */ +export interface SendEnabled { + denom: string; + enabled: boolean; +} +/** + * SendEnabled maps coin denom to a send_enabled status (whether a denom is + * sendable). + */ +export interface SendEnabledSDKType { + denom: string; + enabled: boolean; +} +/** Input models transaction input. */ +export interface Input { + address: string; + coins: Coin[]; +} +/** Input models transaction input. */ +export interface InputSDKType { + address: string; + coins: CoinSDKType[]; +} +/** Output models transaction outputs. 
*/ +export interface Output { + address: string; + coins: Coin[]; +} +/** Output models transaction outputs. */ +export interface OutputSDKType { + address: string; + coins: CoinSDKType[]; +} +/** + * Supply represents a struct that passively keeps track of the total supply + * amounts in the network. + * This message is deprecated now that supply is indexed by denom. + */ +/** @deprecated */ +export interface Supply { + total: Coin[]; +} +/** + * Supply represents a struct that passively keeps track of the total supply + * amounts in the network. + * This message is deprecated now that supply is indexed by denom. + */ +/** @deprecated */ +export interface SupplySDKType { + total: CoinSDKType[]; +} +/** + * DenomUnit represents a struct that describes a given + * denomination unit of the basic token. + */ +export interface DenomUnit { + /** denom represents the string name of the given denom unit (e.g uatom). */ + denom: string; + /** + * exponent represents power of 10 exponent that one must + * raise the base_denom to in order to equal the given DenomUnit's denom + * 1 denom = 10^exponent base_denom + * (e.g. with a base_denom of uatom, one can create a DenomUnit of 'atom' with + * exponent = 6, thus: 1 atom = 10^6 uatom). + */ + exponent: number; + /** aliases is a list of string aliases for the given denom */ + aliases: string[]; +} +/** + * DenomUnit represents a struct that describes a given + * denomination unit of the basic token. + */ +export interface DenomUnitSDKType { + denom: string; + exponent: number; + aliases: string[]; +} +/** + * Metadata represents a struct that describes + * a basic token. + */ +export interface Metadata { + description: string; + /** denom_units represents the list of DenomUnit's for a given coin */ + denomUnits: DenomUnit[]; + /** base represents the base denom (should be the DenomUnit with exponent = 0). */ + base: string; + /** + * display indicates the suggested denom that should be + * displayed in clients. + */ + display: string; + /** + * name defines the name of the token (eg: Cosmos Atom) + * + * Since: cosmos-sdk 0.43 + */ + name: string; + /** + * symbol is the token symbol usually shown on exchanges (eg: ATOM). This can + * be the same as the display. + * + * Since: cosmos-sdk 0.43 + */ + symbol: string; + /** + * URI to a document (on or off-chain) that contains additional information. Optional. + * + * Since: cosmos-sdk 0.46 + */ + uri: string; + /** + * URIHash is a sha256 hash of a document pointed by URI. It's used to verify that + * the document didn't change. Optional. + * + * Since: cosmos-sdk 0.46 + */ + uriHash: string; +} +/** + * Metadata represents a struct that describes + * a basic token. 
+ */ +export interface MetadataSDKType { + description: string; + denom_units: DenomUnitSDKType[]; + base: string; + display: string; + name: string; + symbol: string; + uri: string; + uri_hash: string; +} +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; +export declare const SendEnabled: { + encode(message: SendEnabled, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SendEnabled; + fromPartial(object: DeepPartial): SendEnabled; +}; +export declare const Input: { + encode(message: Input, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Input; + fromPartial(object: DeepPartial): Input; +}; +export declare const Output: { + encode(message: Output, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Output; + fromPartial(object: DeepPartial): Output; +}; +export declare const Supply: { + encode(message: Supply, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Supply; + fromPartial(object: DeepPartial): Supply; +}; +export declare const DenomUnit: { + encode(message: DenomUnit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DenomUnit; + fromPartial(object: DeepPartial): DenomUnit; +}; +export declare const Metadata: { + encode(message: Metadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Metadata; + fromPartial(object: DeepPartial): Metadata; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/genesis.d.ts new file mode 100644 index 00000000..9c967806 --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/genesis.d.ts @@ -0,0 +1,53 @@ +import { Params, ParamsSDKType, Metadata, MetadataSDKType } from "./bank"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the bank module's genesis state. */ +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** balances is an array containing the balances of all the accounts. */ + balances: Balance[]; + /** + * supply represents the total supply. If it is left empty, then supply will be calculated based on the provided + * balances. Otherwise, it will be used to validate that the sum of the balances equals this amount. + */ + supply: Coin[]; + /** denom_metadata defines the metadata of the differents coins. */ + denomMetadata: Metadata[]; +} +/** GenesisState defines the bank module's genesis state. */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + balances: BalanceSDKType[]; + supply: CoinSDKType[]; + denom_metadata: MetadataSDKType[]; +} +/** + * Balance defines an account address and balance pair used in the bank module's + * genesis state. + */ +export interface Balance { + /** address is the address of the balance holder. */ + address: string; + /** coins defines the different coins this balance holds. */ + coins: Coin[]; +} +/** + * Balance defines an account address and balance pair used in the bank module's + * genesis state. 
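The `exponent` field is what links a display unit to the base denom (1 denom = 10^exponent base_denom). A small worked sketch of converting a display amount into base units from a `Metadata` value (the metadata literal below is illustrative, not taken from any particular chain):

```ts
import { Metadata } from "./dist/cosmos/bank/v1beta1/bank";

// Illustrative metadata: base "uatom" with a display unit "atom" at exponent 6.
const meta = Metadata.fromPartial({
  base: "uatom",
  display: "atom",
  denomUnits: [
    { denom: "uatom", exponent: 0, aliases: [] },
    { denom: "atom", exponent: 6, aliases: [] }
  ],
  name: "Cosmos Atom",
  symbol: "ATOM"
});

// 2.5 atom = 2.5 * 10^6 = 2_500_000 uatom.
function toBaseUnits(metadata: Metadata, displayAmount: number): bigint {
  const unit = metadata.denomUnits.find(u => u.denom === metadata.display);
  if (!unit) throw new Error(`display unit ${metadata.display} not found`);
  return BigInt(Math.round(displayAmount * 10 ** unit.exponent));
}

console.log(toBaseUnits(meta, 2.5)); // 2500000n
```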
+ */ +export interface BalanceSDKType { + address: string; + coins: CoinSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const Balance: { + encode(message: Balance, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Balance; + fromPartial(object: DeepPartial): Balance; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/query.d.ts new file mode 100644 index 00000000..0998895e --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/query.d.ts @@ -0,0 +1,362 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Params, ParamsSDKType, Metadata, MetadataSDKType } from "./bank"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryBalanceRequest is the request type for the Query/Balance RPC method. */ +export interface QueryBalanceRequest { + /** address is the address to query balances for. */ + address: string; + /** denom is the coin denom to query balances for. */ + denom: string; +} +/** QueryBalanceRequest is the request type for the Query/Balance RPC method. */ +export interface QueryBalanceRequestSDKType { + address: string; + denom: string; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method. */ +export interface QueryBalanceResponse { + /** balance is the balance of the coin. */ + balance?: Coin; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method. */ +export interface QueryBalanceResponseSDKType { + balance?: CoinSDKType; +} +/** QueryBalanceRequest is the request type for the Query/AllBalances RPC method. */ +export interface QueryAllBalancesRequest { + /** address is the address to query balances for. */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryBalanceRequest is the request type for the Query/AllBalances RPC method. */ +export interface QueryAllBalancesRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryAllBalancesResponse is the response type for the Query/AllBalances RPC + * method. + */ +export interface QueryAllBalancesResponse { + /** balances is the balances of all the coins. */ + balances: Coin[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryAllBalancesResponse is the response type for the Query/AllBalances RPC + * method. + */ +export interface QueryAllBalancesResponseSDKType { + balances: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QuerySpendableBalancesRequest defines the gRPC request structure for querying + * an account's spendable balances. + */ +export interface QuerySpendableBalancesRequest { + /** address is the address to query spendable balances for. */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QuerySpendableBalancesRequest defines the gRPC request structure for querying + * an account's spendable balances. 
+ */ +export interface QuerySpendableBalancesRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QuerySpendableBalancesResponse defines the gRPC response structure for querying + * an account's spendable balances. + */ +export interface QuerySpendableBalancesResponse { + /** balances is the spendable balances of all the coins. */ + balances: Coin[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QuerySpendableBalancesResponse defines the gRPC response structure for querying + * an account's spendable balances. + */ +export interface QuerySpendableBalancesResponseSDKType { + balances: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC + * method. + */ +export interface QueryTotalSupplyRequest { + /** + * pagination defines an optional pagination for the request. + * + * Since: cosmos-sdk 0.43 + */ + pagination?: PageRequest; +} +/** + * QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC + * method. + */ +export interface QueryTotalSupplyRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC + * method + */ +export interface QueryTotalSupplyResponse { + /** supply is the supply of the coins */ + supply: Coin[]; + /** + * pagination defines the pagination in the response. + * + * Since: cosmos-sdk 0.43 + */ + pagination?: PageResponse; +} +/** + * QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC + * method + */ +export interface QueryTotalSupplyResponseSDKType { + supply: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. */ +export interface QuerySupplyOfRequest { + /** denom is the coin denom to query balances for. */ + denom: string; +} +/** QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. */ +export interface QuerySupplyOfRequestSDKType { + denom: string; +} +/** QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. */ +export interface QuerySupplyOfResponse { + /** amount is the supply of the coin. */ + amount?: Coin; +} +/** QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. */ +export interface QuerySupplyOfResponseSDKType { + amount?: CoinSDKType; +} +/** QueryParamsRequest defines the request type for querying x/bank parameters. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest defines the request type for querying x/bank parameters. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse defines the response type for querying x/bank parameters. */ +export interface QueryParamsResponse { + params?: Params; +} +/** QueryParamsResponse defines the response type for querying x/bank parameters. */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. */ +export interface QueryDenomsMetadataRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. 
*/ +export interface QueryDenomsMetadataRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC + * method. + */ +export interface QueryDenomsMetadataResponse { + /** metadata provides the client information for all the registered tokens. */ + metadatas: Metadata[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC + * method. + */ +export interface QueryDenomsMetadataResponseSDKType { + metadatas: MetadataSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. */ +export interface QueryDenomMetadataRequest { + /** denom is the coin denom to query the metadata for. */ + denom: string; +} +/** QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. */ +export interface QueryDenomMetadataRequestSDKType { + denom: string; +} +/** + * QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC + * method. + */ +export interface QueryDenomMetadataResponse { + /** metadata describes and provides all the client information for the requested token. */ + metadata?: Metadata; +} +/** + * QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC + * method. + */ +export interface QueryDenomMetadataResponseSDKType { + metadata?: MetadataSDKType; +} +/** + * QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, + * which queries for a paginated set of all account holders of a particular + * denomination. + */ +export interface QueryDenomOwnersRequest { + /** denom defines the coin denomination to query all account holders for. */ + denom: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, + * which queries for a paginated set of all account holders of a particular + * denomination. + */ +export interface QueryDenomOwnersRequestSDKType { + denom: string; + pagination?: PageRequestSDKType; +} +/** + * DenomOwner defines structure representing an account that owns or holds a + * particular denominated token. It contains the account address and account + * balance of the denominated token. + */ +export interface DenomOwner { + /** address defines the address that owns a particular denomination. */ + address: string; + /** balance is the balance of the denominated coin for an account. */ + balance?: Coin; +} +/** + * DenomOwner defines structure representing an account that owns or holds a + * particular denominated token. It contains the account address and account + * balance of the denominated token. + */ +export interface DenomOwnerSDKType { + address: string; + balance?: CoinSDKType; +} +/** QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. */ +export interface QueryDenomOwnersResponse { + denomOwners: DenomOwner[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. 
*/ +export interface QueryDenomOwnersResponseSDKType { + denom_owners: DenomOwnerSDKType[]; + pagination?: PageResponseSDKType; +} +export declare const QueryBalanceRequest: { + encode(message: QueryBalanceRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceRequest; + fromPartial(object: DeepPartial): QueryBalanceRequest; +}; +export declare const QueryBalanceResponse: { + encode(message: QueryBalanceResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceResponse; + fromPartial(object: DeepPartial): QueryBalanceResponse; +}; +export declare const QueryAllBalancesRequest: { + encode(message: QueryAllBalancesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllBalancesRequest; + fromPartial(object: DeepPartial): QueryAllBalancesRequest; +}; +export declare const QueryAllBalancesResponse: { + encode(message: QueryAllBalancesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllBalancesResponse; + fromPartial(object: DeepPartial): QueryAllBalancesResponse; +}; +export declare const QuerySpendableBalancesRequest: { + encode(message: QuerySpendableBalancesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySpendableBalancesRequest; + fromPartial(object: DeepPartial): QuerySpendableBalancesRequest; +}; +export declare const QuerySpendableBalancesResponse: { + encode(message: QuerySpendableBalancesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySpendableBalancesResponse; + fromPartial(object: DeepPartial): QuerySpendableBalancesResponse; +}; +export declare const QueryTotalSupplyRequest: { + encode(message: QueryTotalSupplyRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTotalSupplyRequest; + fromPartial(object: DeepPartial): QueryTotalSupplyRequest; +}; +export declare const QueryTotalSupplyResponse: { + encode(message: QueryTotalSupplyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTotalSupplyResponse; + fromPartial(object: DeepPartial): QueryTotalSupplyResponse; +}; +export declare const QuerySupplyOfRequest: { + encode(message: QuerySupplyOfRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyOfRequest; + fromPartial(object: DeepPartial): QuerySupplyOfRequest; +}; +export declare const QuerySupplyOfResponse: { + encode(message: QuerySupplyOfResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyOfResponse; + fromPartial(object: DeepPartial): QuerySupplyOfResponse; +}; +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryDenomsMetadataRequest: { + encode(message: QueryDenomsMetadataRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader 
| Uint8Array, length?: number): QueryDenomsMetadataRequest; + fromPartial(object: DeepPartial): QueryDenomsMetadataRequest; +}; +export declare const QueryDenomsMetadataResponse: { + encode(message: QueryDenomsMetadataResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomsMetadataResponse; + fromPartial(object: DeepPartial): QueryDenomsMetadataResponse; +}; +export declare const QueryDenomMetadataRequest: { + encode(message: QueryDenomMetadataRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomMetadataRequest; + fromPartial(object: DeepPartial): QueryDenomMetadataRequest; +}; +export declare const QueryDenomMetadataResponse: { + encode(message: QueryDenomMetadataResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomMetadataResponse; + fromPartial(object: DeepPartial): QueryDenomMetadataResponse; +}; +export declare const QueryDenomOwnersRequest: { + encode(message: QueryDenomOwnersRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomOwnersRequest; + fromPartial(object: DeepPartial): QueryDenomOwnersRequest; +}; +export declare const DenomOwner: { + encode(message: DenomOwner, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DenomOwner; + fromPartial(object: DeepPartial): DenomOwner; +}; +export declare const QueryDenomOwnersResponse: { + encode(message: QueryDenomOwnersResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomOwnersResponse; + fromPartial(object: DeepPartial): QueryDenomOwnersResponse; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..49fe501e --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/query.lcd.d.ts @@ -0,0 +1,17 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryBalanceRequest, QueryBalanceResponseSDKType, QueryAllBalancesRequest, QueryAllBalancesResponseSDKType, QuerySpendableBalancesRequest, QuerySpendableBalancesResponseSDKType, QueryTotalSupplyRequest, QueryTotalSupplyResponseSDKType, QuerySupplyOfRequest, QuerySupplyOfResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDenomMetadataRequest, QueryDenomMetadataResponseSDKType, QueryDenomsMetadataRequest, QueryDenomsMetadataResponseSDKType, QueryDenomOwnersRequest, QueryDenomOwnersResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + balance(params: QueryBalanceRequest): Promise; + allBalances(params: QueryAllBalancesRequest): Promise; + spendableBalances(params: QuerySpendableBalancesRequest): Promise; + totalSupply(params?: QueryTotalSupplyRequest): Promise; + supplyOf(params: QuerySupplyOfRequest): Promise; + params(_params?: QueryParamsRequest): Promise; + denomMetadata(params: QueryDenomMetadataRequest): Promise; + denomsMetadata(params?: QueryDenomsMetadataRequest): Promise; + denomOwners(params: QueryDenomOwnersRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..d746df09 --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,57 
@@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryBalanceRequest, QueryBalanceResponse, QueryAllBalancesRequest, QueryAllBalancesResponse, QuerySpendableBalancesRequest, QuerySpendableBalancesResponse, QueryTotalSupplyRequest, QueryTotalSupplyResponse, QuerySupplyOfRequest, QuerySupplyOfResponse, QueryParamsRequest, QueryParamsResponse, QueryDenomMetadataRequest, QueryDenomMetadataResponse, QueryDenomsMetadataRequest, QueryDenomsMetadataResponse, QueryDenomOwnersRequest, QueryDenomOwnersResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** Balance queries the balance of a single coin for a single account. */ + balance(request: QueryBalanceRequest): Promise<QueryBalanceResponse>; + /** AllBalances queries the balance of all coins for a single account. */ + allBalances(request: QueryAllBalancesRequest): Promise<QueryAllBalancesResponse>; + /** + * SpendableBalances queries the spendable balance of all coins for a single + * account. + */ + spendableBalances(request: QuerySpendableBalancesRequest): Promise<QuerySpendableBalancesResponse>; + /** TotalSupply queries the total supply of all coins. */ + totalSupply(request?: QueryTotalSupplyRequest): Promise<QueryTotalSupplyResponse>; + /** SupplyOf queries the supply of a single coin. */ + supplyOf(request: QuerySupplyOfRequest): Promise<QuerySupplyOfResponse>; + /** Params queries the parameters of x/bank module. */ + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + /** DenomMetadata queries the client metadata of a given coin denomination. */ + denomMetadata(request: QueryDenomMetadataRequest): Promise<QueryDenomMetadataResponse>; + /** + * DenomsMetadata queries the client metadata for all registered coin + * denominations. + */ + denomsMetadata(request?: QueryDenomsMetadataRequest): Promise<QueryDenomsMetadataResponse>; + /** + * DenomOwners queries for all account addresses that own a particular token + * denomination.
+ */ + denomOwners(request: QueryDenomOwnersRequest): Promise<QueryDenomOwnersResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + balance(request: QueryBalanceRequest): Promise<QueryBalanceResponse>; + allBalances(request: QueryAllBalancesRequest): Promise<QueryAllBalancesResponse>; + spendableBalances(request: QuerySpendableBalancesRequest): Promise<QuerySpendableBalancesResponse>; + totalSupply(request?: QueryTotalSupplyRequest): Promise<QueryTotalSupplyResponse>; + supplyOf(request: QuerySupplyOfRequest): Promise<QuerySupplyOfResponse>; + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + denomMetadata(request: QueryDenomMetadataRequest): Promise<QueryDenomMetadataResponse>; + denomsMetadata(request?: QueryDenomsMetadataRequest): Promise<QueryDenomsMetadataResponse>; + denomOwners(request: QueryDenomOwnersRequest): Promise<QueryDenomOwnersResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + balance(request: QueryBalanceRequest): Promise<QueryBalanceResponse>; + allBalances(request: QueryAllBalancesRequest): Promise<QueryAllBalancesResponse>; + spendableBalances(request: QuerySpendableBalancesRequest): Promise<QuerySpendableBalancesResponse>; + totalSupply(request?: QueryTotalSupplyRequest): Promise<QueryTotalSupplyResponse>; + supplyOf(request: QuerySupplyOfRequest): Promise<QuerySupplyOfResponse>; + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + denomMetadata(request: QueryDenomMetadataRequest): Promise<QueryDenomMetadataResponse>; + denomsMetadata(request?: QueryDenomsMetadataRequest): Promise<QueryDenomsMetadataResponse>; + denomOwners(request: QueryDenomOwnersRequest): Promise<QueryDenomOwnersResponse>; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/tx.d.ts new file mode 100644 index 00000000..d10ec379 --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/tx.d.ts @@ -0,0 +1,58 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Input, InputSDKType, Output, OutputSDKType } from "./bank"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgSend represents a message to send coins from one account to another. */ +export interface MsgSend { + fromAddress: string; + toAddress: string; + amount: Coin[]; +} +/** MsgSend represents a message to send coins from one account to another. */ +export interface MsgSendSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; +} +/** MsgSendResponse defines the Msg/Send response type. */ +export interface MsgSendResponse { +} +/** MsgSendResponse defines the Msg/Send response type. */ +export interface MsgSendResponseSDKType { +} +/** MsgMultiSend represents an arbitrary multi-in, multi-out send message. */ +export interface MsgMultiSend { + inputs: Input[]; + outputs: Output[]; +} +/** MsgMultiSend represents an arbitrary multi-in, multi-out send message. */ +export interface MsgMultiSendSDKType { + inputs: InputSDKType[]; + outputs: OutputSDKType[]; +} +/** MsgMultiSendResponse defines the Msg/MultiSend response type. */ +export interface MsgMultiSendResponse { +} +/** MsgMultiSendResponse defines the Msg/MultiSend response type.
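`createRpcQueryExtension` plugs the generated querier into a `QueryClient` from `@cosmjs/stargate`. A connection sketch, assuming a Tendermint RPC endpoint (placeholder URL), that `QueryClient` can be constructed directly from a Tendermint client, and the dist import paths used elsewhere in this diff:

```ts
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./dist/cosmos/bank/v1beta1/query.rpc.Query";

async function getBalance(address: string, denom: string) {
  const tm = await Tendermint34Client.connect("https://rpc.example.com"); // placeholder endpoint
  const base = new QueryClient(tm); // assumed: QueryClient built from a Tendermint client
  const bank = createRpcQueryExtension(base);

  const { balance } = await bank.balance({ address, denom });
  return balance; // Coin | undefined
}
```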
*/ +export interface MsgMultiSendResponseSDKType { +} +export declare const MsgSend: { + encode(message: MsgSend, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSend; + fromPartial(object: DeepPartial): MsgSend; +}; +export declare const MsgSendResponse: { + encode(_: MsgSendResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSendResponse; + fromPartial(_: DeepPartial): MsgSendResponse; +}; +export declare const MsgMultiSend: { + encode(message: MsgMultiSend, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMultiSend; + fromPartial(object: DeepPartial): MsgMultiSend; +}; +export declare const MsgMultiSendResponse: { + encode(_: MsgMultiSendResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMultiSendResponse; + fromPartial(_: DeepPartial): MsgMultiSendResponse; +}; diff --git a/packages/codegen/dist/cosmos/bank/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/bank/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..ac12e6ab --- /dev/null +++ b/packages/codegen/dist/cosmos/bank/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,15 @@ +import { Rpc } from "../../../helpers"; +import { MsgSend, MsgSendResponse, MsgMultiSend, MsgMultiSendResponse } from "./tx"; +/** Msg defines the bank Msg service. */ +export interface Msg { + /** Send defines a method for sending coins from one account to another account. */ + send(request: MsgSend): Promise; + /** MultiSend defines a method for sending coins from some accounts to other accounts. */ + multiSend(request: MsgMultiSend): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + send(request: MsgSend): Promise; + multiSend(request: MsgMultiSend): Promise; +} diff --git a/packages/codegen/dist/cosmos/base/abci/v1beta1/abci.d.ts b/packages/codegen/dist/cosmos/base/abci/v1beta1/abci.d.ts new file mode 100644 index 00000000..d83cf951 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/abci/v1beta1/abci.d.ts @@ -0,0 +1,294 @@ +/// +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Event, EventSDKType } from "../../../../tendermint/abci/types"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * TxResponse defines a structure containing relevant tx data and metadata. The + * tags are stringified and the log is JSON decoded. + */ +export interface TxResponse { + /** The block height */ + height: Long; + /** The transaction hash. */ + txhash: string; + /** Namespace for the Code */ + codespace: string; + /** Response code. */ + code: number; + /** Result bytes, if any. */ + data: string; + /** + * The output of the application's logger (raw string). May be + * non-deterministic. + */ + rawLog: string; + /** The output of the application's logger (typed). May be non-deterministic. */ + logs: ABCIMessageLog[]; + /** Additional information. May be non-deterministic. */ + info: string; + /** Amount of gas requested for transaction. */ + gasWanted: Long; + /** Amount of gas consumed by transaction. */ + gasUsed: Long; + /** The request transaction bytes. */ + tx?: Any; + /** + * Time of the previous block. For heights > 1, it's the weighted median of + * the timestamps of the valid votes in the block.LastCommit. For height == 1, + * it's genesis time. 
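Every generated message exposes `encode`, `decode`, and `fromPartial` on its codec object, which is all that is needed for a wire round trip. A sketch with the bank `MsgSend` declared above (values are placeholders):

```ts
import { MsgSend } from "./dist/cosmos/bank/v1beta1/tx";

const msg = MsgSend.fromPartial({
  fromAddress: "cosmos1senderaddress...",
  toAddress: "cosmos1recipientaddress...",
  amount: [{ denom: "uatom", amount: "2500000" }]
});

// encode() returns a protobufjs Writer; finish() yields the wire bytes.
const bytes: Uint8Array = MsgSend.encode(msg).finish();

// decode() accepts either a Reader or the raw bytes.
const roundTripped = MsgSend.decode(bytes);
console.log(roundTripped.amount[0].denom); // "uatom"
```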
+ */ + timestamp: string; + /** + * Events defines all the events emitted by processing a transaction. Note, + * these events include those emitted by processing all the messages and those + * emitted from the ante handler. Whereas Logs contains the events, with + * additional metadata, emitted only by processing the messages. + * + * Since: cosmos-sdk 0.42.11, 0.44.5, 0.45 + */ + events: Event[]; +} +/** + * TxResponse defines a structure containing relevant tx data and metadata. The + * tags are stringified and the log is JSON decoded. + */ +export interface TxResponseSDKType { + height: Long; + txhash: string; + codespace: string; + code: number; + data: string; + raw_log: string; + logs: ABCIMessageLogSDKType[]; + info: string; + gas_wanted: Long; + gas_used: Long; + tx?: AnySDKType; + timestamp: string; + events: EventSDKType[]; +} +/** ABCIMessageLog defines a structure containing an indexed tx ABCI message log. */ +export interface ABCIMessageLog { + msgIndex: number; + log: string; + /** + * Events contains a slice of Event objects that were emitted during some + * execution. + */ + events: StringEvent[]; +} +/** ABCIMessageLog defines a structure containing an indexed tx ABCI message log. */ +export interface ABCIMessageLogSDKType { + msg_index: number; + log: string; + events: StringEventSDKType[]; +} +/** + * StringEvent defines en Event object wrapper where all the attributes + * contain key/value pairs that are strings instead of raw bytes. + */ +export interface StringEvent { + type: string; + attributes: Attribute[]; +} +/** + * StringEvent defines en Event object wrapper where all the attributes + * contain key/value pairs that are strings instead of raw bytes. + */ +export interface StringEventSDKType { + type: string; + attributes: AttributeSDKType[]; +} +/** + * Attribute defines an attribute wrapper where the key and value are + * strings instead of raw bytes. + */ +export interface Attribute { + key: string; + value: string; +} +/** + * Attribute defines an attribute wrapper where the key and value are + * strings instead of raw bytes. + */ +export interface AttributeSDKType { + key: string; + value: string; +} +/** GasInfo defines tx execution gas context. */ +export interface GasInfo { + /** GasWanted is the maximum units of work we allow this tx to perform. */ + gasWanted: Long; + /** GasUsed is the amount of gas actually consumed. */ + gasUsed: Long; +} +/** GasInfo defines tx execution gas context. */ +export interface GasInfoSDKType { + gas_wanted: Long; + gas_used: Long; +} +/** Result is the union of ResponseFormat and ResponseCheckTx. */ +export interface Result { + /** + * Data is any data returned from message or handler execution. It MUST be + * length prefixed in order to separate data from multiple message executions. + * Deprecated. This field is still populated, but prefer msg_response instead + * because it also contains the Msg response typeURL. + */ + /** @deprecated */ + data: Uint8Array; + /** Log contains the log information from message or handler execution. */ + log: string; + /** + * Events contains a slice of Event objects that were emitted during message + * or handler execution. + */ + events: Event[]; + /** + * msg_responses contains the Msg handler responses type packed in Anys. + * + * Since: cosmos-sdk 0.46 + */ + msgResponses: Any[]; +} +/** Result is the union of ResponseFormat and ResponseCheckTx. 
*/ +export interface ResultSDKType { + /** @deprecated */ + data: Uint8Array; + log: string; + events: EventSDKType[]; + msg_responses: AnySDKType[]; +} +/** + * SimulationResponse defines the response generated when a transaction is + * successfully simulated. + */ +export interface SimulationResponse { + gasInfo?: GasInfo; + result?: Result; +} +/** + * SimulationResponse defines the response generated when a transaction is + * successfully simulated. + */ +export interface SimulationResponseSDKType { + gas_info?: GasInfoSDKType; + result?: ResultSDKType; +} +/** + * MsgData defines the data returned in a Result object during message + * execution. + */ +/** @deprecated */ +export interface MsgData { + msgType: string; + data: Uint8Array; +} +/** + * MsgData defines the data returned in a Result object during message + * execution. + */ +/** @deprecated */ +export interface MsgDataSDKType { + msg_type: string; + data: Uint8Array; +} +/** + * TxMsgData defines a list of MsgData. A transaction will have a MsgData object + * for each message. + */ +export interface TxMsgData { + /** data field is deprecated and not populated. */ + /** @deprecated */ + data: MsgData[]; + /** + * msg_responses contains the Msg handler responses packed into Anys. + * + * Since: cosmos-sdk 0.46 + */ + msgResponses: Any[]; +} +/** + * TxMsgData defines a list of MsgData. A transaction will have a MsgData object + * for each message. + */ +export interface TxMsgDataSDKType { + /** @deprecated */ + data: MsgDataSDKType[]; + msg_responses: AnySDKType[]; +} +/** SearchTxsResult defines a structure for querying txs pageable */ +export interface SearchTxsResult { + /** Count of all txs */ + totalCount: Long; + /** Count of txs in current page */ + count: Long; + /** Index of current page, start from 1 */ + pageNumber: Long; + /** Count of total pages */ + pageTotal: Long; + /** Max count txs per page */ + limit: Long; + /** List of txs in current page */ + txs: TxResponse[]; +} +/** SearchTxsResult defines a structure for querying txs pageable */ +export interface SearchTxsResultSDKType { + total_count: Long; + count: Long; + page_number: Long; + page_total: Long; + limit: Long; + txs: TxResponseSDKType[]; +} +export declare const TxResponse: { + encode(message: TxResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxResponse; + fromPartial(object: DeepPartial): TxResponse; +}; +export declare const ABCIMessageLog: { + encode(message: ABCIMessageLog, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ABCIMessageLog; + fromPartial(object: DeepPartial): ABCIMessageLog; +}; +export declare const StringEvent: { + encode(message: StringEvent, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StringEvent; + fromPartial(object: DeepPartial): StringEvent; +}; +export declare const Attribute: { + encode(message: Attribute, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Attribute; + fromPartial(object: DeepPartial): Attribute; +}; +export declare const GasInfo: { + encode(message: GasInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GasInfo; + fromPartial(object: DeepPartial): GasInfo; +}; +export declare const Result: { + encode(message: Result, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Result; + fromPartial(object: DeepPartial): Result; +}; +export 
declare const SimulationResponse: { + encode(message: SimulationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SimulationResponse; + fromPartial(object: DeepPartial): SimulationResponse; +}; +export declare const MsgData: { + encode(message: MsgData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgData; + fromPartial(object: DeepPartial): MsgData; +}; +export declare const TxMsgData: { + encode(message: TxMsgData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxMsgData; + fromPartial(object: DeepPartial): TxMsgData; +}; +export declare const SearchTxsResult: { + encode(message: SearchTxsResult, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SearchTxsResult; + fromPartial(object: DeepPartial): SearchTxsResult; +}; diff --git a/packages/codegen/dist/cosmos/base/kv/v1beta1/kv.d.ts b/packages/codegen/dist/cosmos/base/kv/v1beta1/kv.d.ts new file mode 100644 index 00000000..9f6271ce --- /dev/null +++ b/packages/codegen/dist/cosmos/base/kv/v1beta1/kv.d.ts @@ -0,0 +1,30 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** Pairs defines a repeated slice of Pair objects. */ +export interface Pairs { + pairs: Pair[]; +} +/** Pairs defines a repeated slice of Pair objects. */ +export interface PairsSDKType { + pairs: PairSDKType[]; +} +/** Pair defines a key/value bytes tuple. */ +export interface Pair { + key: Uint8Array; + value: Uint8Array; +} +/** Pair defines a key/value bytes tuple. */ +export interface PairSDKType { + key: Uint8Array; + value: Uint8Array; +} +export declare const Pairs: { + encode(message: Pairs, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Pairs; + fromPartial(object: DeepPartial): Pairs; +}; +export declare const Pair: { + encode(message: Pair, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Pair; + fromPartial(object: DeepPartial): Pair; +}; diff --git a/packages/codegen/dist/cosmos/base/query/v1beta1/pagination.d.ts b/packages/codegen/dist/cosmos/base/query/v1beta1/pagination.d.ts new file mode 100644 index 00000000..cc13d8d7 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/query/v1beta1/pagination.d.ts @@ -0,0 +1,105 @@ +/// +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: Long; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: Long; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. 
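`TxMsgData` is what a client decodes out of a transaction result's data bytes to reach the per-message responses. A decoding sketch (`dataBytes` stands in for bytes obtained from a broadcast or tx query result; it is declared, not fetched, here):

```ts
import { TxMsgData } from "./dist/cosmos/base/abci/v1beta1/abci";

// Placeholder for bytes taken from a tx result (e.g. after base64 decoding).
declare const dataBytes: Uint8Array;

const txMsgData = TxMsgData.decode(dataBytes);

// On cosmos-sdk 0.46+ the responses live in msgResponses as packed Anys;
// the legacy data field is deprecated and may be empty.
for (const anyResp of txMsgData.msgResponses) {
  console.log(anyResp.typeUrl); // e.g. "/cosmos.bank.v1beta1.MsgSendResponse"
}
```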
+ */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequestSDKType { + key: Uint8Array; + offset: Long; + limit: Long; + count_total: boolean; + reverse: boolean; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: Long; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponseSDKType { + next_key: Uint8Array; + total: Long; +} +export declare const PageRequest: { + encode(message: PageRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest; + fromPartial(object: DeepPartial): PageRequest; +}; +export declare const PageResponse: { + encode(message: PageResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse; + fromPartial(object: DeepPartial): PageResponse; +}; diff --git a/packages/codegen/dist/cosmos/base/reflection/v1beta1/reflection.d.ts b/packages/codegen/dist/cosmos/base/reflection/v1beta1/reflection.d.ts new file mode 100644 index 00000000..afa8751c --- /dev/null +++ b/packages/codegen/dist/cosmos/base/reflection/v1beta1/reflection.d.ts @@ -0,0 +1,66 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** ListAllInterfacesRequest is the request type of the ListAllInterfaces RPC. */ +export interface ListAllInterfacesRequest { +} +/** ListAllInterfacesRequest is the request type of the ListAllInterfaces RPC. */ +export interface ListAllInterfacesRequestSDKType { +} +/** ListAllInterfacesResponse is the response type of the ListAllInterfaces RPC. */ +export interface ListAllInterfacesResponse { + /** interface_names is an array of all the registered interfaces. */ + interfaceNames: string[]; +} +/** ListAllInterfacesResponse is the response type of the ListAllInterfaces RPC. */ +export interface ListAllInterfacesResponseSDKType { + interface_names: string[]; +} +/** + * ListImplementationsRequest is the request type of the ListImplementations + * RPC. + */ +export interface ListImplementationsRequest { + /** interface_name defines the interface to query the implementations for. */ + interfaceName: string; +} +/** + * ListImplementationsRequest is the request type of the ListImplementations + * RPC. + */ +export interface ListImplementationsRequestSDKType { + interface_name: string; +} +/** + * ListImplementationsResponse is the response type of the ListImplementations + * RPC. 
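`PageResponse.nextKey` feeds straight back into `PageRequest.key`, so walking a full result set is a loop over `nextKey`. A sketch against the bank `allBalances` query (the `Rpc` implementation and address are supplied by the caller; `Long` is re-exported from the generated helpers):

```ts
import { Long, Rpc } from "./dist/helpers";
import { PageRequest } from "./dist/cosmos/base/query/v1beta1/pagination";
import { QueryClientImpl } from "./dist/cosmos/bank/v1beta1/query.rpc.Query";
import { Coin } from "./dist/cosmos/base/v1beta1/coin";

async function allBalancesPaged(rpc: Rpc, address: string): Promise<Coin[]> {
  const query = new QueryClientImpl(rpc);
  const balances: Coin[] = [];
  let key = new Uint8Array();

  do {
    const page = await query.allBalances({
      address,
      pagination: PageRequest.fromPartial({
        key,                         // empty on the first request
        limit: Long.fromNumber(100), // page size; only one of key/offset should be set
        countTotal: false,
        reverse: false
      })
    });
    balances.push(...page.balances);
    key = page.pagination?.nextKey ?? new Uint8Array();
  } while (key.length > 0);

  return balances;
}
```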
+ */ +export interface ListImplementationsResponse { + implementationMessageNames: string[]; +} +/** + * ListImplementationsResponse is the response type of the ListImplementations + * RPC. + */ +export interface ListImplementationsResponseSDKType { + implementation_message_names: string[]; +} +export declare const ListAllInterfacesRequest: { + encode(_: ListAllInterfacesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ListAllInterfacesRequest; + fromPartial(_: DeepPartial): ListAllInterfacesRequest; +}; +export declare const ListAllInterfacesResponse: { + encode(message: ListAllInterfacesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ListAllInterfacesResponse; + fromPartial(object: DeepPartial): ListAllInterfacesResponse; +}; +export declare const ListImplementationsRequest: { + encode(message: ListImplementationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ListImplementationsRequest; + fromPartial(object: DeepPartial): ListImplementationsRequest; +}; +export declare const ListImplementationsResponse: { + encode(message: ListImplementationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ListImplementationsResponse; + fromPartial(object: DeepPartial): ListImplementationsResponse; +}; diff --git a/packages/codegen/dist/cosmos/base/reflection/v2alpha1/reflection.d.ts b/packages/codegen/dist/cosmos/base/reflection/v2alpha1/reflection.d.ts new file mode 100644 index 00000000..77ddfda7 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/reflection/v2alpha1/reflection.d.ts @@ -0,0 +1,451 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** AppDescriptor describes a cosmos-sdk based application */ +export interface AppDescriptor { + /** + * AuthnDescriptor provides information on how to authenticate transactions on the application + * NOTE: experimental and subject to change in future releases. + */ + authn?: AuthnDescriptor; + /** chain provides the chain descriptor */ + chain?: ChainDescriptor; + /** codec provides metadata information regarding codec related types */ + codec?: CodecDescriptor; + /** configuration provides metadata information regarding the sdk.Config type */ + configuration?: ConfigurationDescriptor; + /** query_services provides metadata information regarding the available queriable endpoints */ + queryServices?: QueryServicesDescriptor; + /** tx provides metadata information regarding how to send transactions to the given application */ + tx?: TxDescriptor; +} +/** AppDescriptor describes a cosmos-sdk based application */ +export interface AppDescriptorSDKType { + authn?: AuthnDescriptorSDKType; + chain?: ChainDescriptorSDKType; + codec?: CodecDescriptorSDKType; + configuration?: ConfigurationDescriptorSDKType; + query_services?: QueryServicesDescriptorSDKType; + tx?: TxDescriptorSDKType; +} +/** TxDescriptor describes the accepted transaction type */ +export interface TxDescriptor { + /** + * fullname is the protobuf fullname of the raw transaction type (for instance the tx.Tx type) + * it is not meant to support polymorphism of transaction types, it is supposed to be used by + * reflection clients to understand if they can handle a specific transaction type in an application. 
+ */ + fullname: string; + /** msgs lists the accepted application messages (sdk.Msg) */ + msgs: MsgDescriptor[]; +} +/** TxDescriptor describes the accepted transaction type */ +export interface TxDescriptorSDKType { + fullname: string; + msgs: MsgDescriptorSDKType[]; +} +/** + * AuthnDescriptor provides information on how to sign transactions without relying + * on the online RPCs GetTxMetadata and CombineUnsignedTxAndSignatures + */ +export interface AuthnDescriptor { + /** sign_modes defines the supported signature algorithm */ + signModes: SigningModeDescriptor[]; +} +/** + * AuthnDescriptor provides information on how to sign transactions without relying + * on the online RPCs GetTxMetadata and CombineUnsignedTxAndSignatures + */ +export interface AuthnDescriptorSDKType { + sign_modes: SigningModeDescriptorSDKType[]; +} +/** + * SigningModeDescriptor provides information on a signing flow of the application + * NOTE(fdymylja): here we could go as far as providing an entire flow on how + * to sign a message given a SigningModeDescriptor, but it's better to think about + * this another time + */ +export interface SigningModeDescriptor { + /** name defines the unique name of the signing mode */ + name: string; + /** number is the unique int32 identifier for the sign_mode enum */ + number: number; + /** + * authn_info_provider_method_fullname defines the fullname of the method to call to get + * the metadata required to authenticate using the provided sign_modes + */ + authnInfoProviderMethodFullname: string; +} +/** + * SigningModeDescriptor provides information on a signing flow of the application + * NOTE(fdymylja): here we could go as far as providing an entire flow on how + * to sign a message given a SigningModeDescriptor, but it's better to think about + * this another time + */ +export interface SigningModeDescriptorSDKType { + name: string; + number: number; + authn_info_provider_method_fullname: string; +} +/** ChainDescriptor describes chain information of the application */ +export interface ChainDescriptor { + /** id is the chain id */ + id: string; +} +/** ChainDescriptor describes chain information of the application */ +export interface ChainDescriptorSDKType { + id: string; +} +/** CodecDescriptor describes the registered interfaces and provides metadata information on the types */ +export interface CodecDescriptor { + /** interfaces is a list of the registerted interfaces descriptors */ + interfaces: InterfaceDescriptor[]; +} +/** CodecDescriptor describes the registered interfaces and provides metadata information on the types */ +export interface CodecDescriptorSDKType { + interfaces: InterfaceDescriptorSDKType[]; +} +/** InterfaceDescriptor describes the implementation of an interface */ +export interface InterfaceDescriptor { + /** fullname is the name of the interface */ + fullname: string; + /** + * interface_accepting_messages contains information regarding the proto messages which contain the interface as + * google.protobuf.Any field + */ + interfaceAcceptingMessages: InterfaceAcceptingMessageDescriptor[]; + /** interface_implementers is a list of the descriptors of the interface implementers */ + interfaceImplementers: InterfaceImplementerDescriptor[]; +} +/** InterfaceDescriptor describes the implementation of an interface */ +export interface InterfaceDescriptorSDKType { + fullname: string; + interface_accepting_messages: InterfaceAcceptingMessageDescriptorSDKType[]; + interface_implementers: InterfaceImplementerDescriptorSDKType[]; +} +/** 
InterfaceImplementerDescriptor describes an interface implementer */ +export interface InterfaceImplementerDescriptor { + /** fullname is the protobuf queryable name of the interface implementer */ + fullname: string; + /** + * type_url defines the type URL used when marshalling the type as any + * this is required so we can provide type safe google.protobuf.Any marshalling and + * unmarshalling, making sure that we don't accept just 'any' type + * in our interface fields + */ + typeUrl: string; +} +/** InterfaceImplementerDescriptor describes an interface implementer */ +export interface InterfaceImplementerDescriptorSDKType { + fullname: string; + type_url: string; +} +/** + * InterfaceAcceptingMessageDescriptor describes a protobuf message which contains + * an interface represented as a google.protobuf.Any + */ +export interface InterfaceAcceptingMessageDescriptor { + /** fullname is the protobuf fullname of the type containing the interface */ + fullname: string; + /** + * field_descriptor_names is a list of the protobuf name (not fullname) of the field + * which contains the interface as google.protobuf.Any (the interface is the same, but + * it can be in multiple fields of the same proto message) + */ + fieldDescriptorNames: string[]; +} +/** + * InterfaceAcceptingMessageDescriptor describes a protobuf message which contains + * an interface represented as a google.protobuf.Any + */ +export interface InterfaceAcceptingMessageDescriptorSDKType { + fullname: string; + field_descriptor_names: string[]; +} +/** ConfigurationDescriptor contains metadata information on the sdk.Config */ +export interface ConfigurationDescriptor { + /** bech32_account_address_prefix is the account address prefix */ + bech32AccountAddressPrefix: string; +} +/** ConfigurationDescriptor contains metadata information on the sdk.Config */ +export interface ConfigurationDescriptorSDKType { + bech32_account_address_prefix: string; +} +/** MsgDescriptor describes a cosmos-sdk message that can be delivered with a transaction */ +export interface MsgDescriptor { + /** msg_type_url contains the TypeURL of a sdk.Msg. 
*/ + msgTypeUrl: string; +} +/** MsgDescriptor describes a cosmos-sdk message that can be delivered with a transaction */ +export interface MsgDescriptorSDKType { + msg_type_url: string; +} +/** GetAuthnDescriptorRequest is the request used for the GetAuthnDescriptor RPC */ +export interface GetAuthnDescriptorRequest { +} +/** GetAuthnDescriptorRequest is the request used for the GetAuthnDescriptor RPC */ +export interface GetAuthnDescriptorRequestSDKType { +} +/** GetAuthnDescriptorResponse is the response returned by the GetAuthnDescriptor RPC */ +export interface GetAuthnDescriptorResponse { + /** authn describes how to authenticate to the application when sending transactions */ + authn?: AuthnDescriptor; +} +/** GetAuthnDescriptorResponse is the response returned by the GetAuthnDescriptor RPC */ +export interface GetAuthnDescriptorResponseSDKType { + authn?: AuthnDescriptorSDKType; +} +/** GetChainDescriptorRequest is the request used for the GetChainDescriptor RPC */ +export interface GetChainDescriptorRequest { +} +/** GetChainDescriptorRequest is the request used for the GetChainDescriptor RPC */ +export interface GetChainDescriptorRequestSDKType { +} +/** GetChainDescriptorResponse is the response returned by the GetChainDescriptor RPC */ +export interface GetChainDescriptorResponse { + /** chain describes application chain information */ + chain?: ChainDescriptor; +} +/** GetChainDescriptorResponse is the response returned by the GetChainDescriptor RPC */ +export interface GetChainDescriptorResponseSDKType { + chain?: ChainDescriptorSDKType; +} +/** GetCodecDescriptorRequest is the request used for the GetCodecDescriptor RPC */ +export interface GetCodecDescriptorRequest { +} +/** GetCodecDescriptorRequest is the request used for the GetCodecDescriptor RPC */ +export interface GetCodecDescriptorRequestSDKType { +} +/** GetCodecDescriptorResponse is the response returned by the GetCodecDescriptor RPC */ +export interface GetCodecDescriptorResponse { + /** codec describes the application codec such as registered interfaces and implementations */ + codec?: CodecDescriptor; +} +/** GetCodecDescriptorResponse is the response returned by the GetCodecDescriptor RPC */ +export interface GetCodecDescriptorResponseSDKType { + codec?: CodecDescriptorSDKType; +} +/** GetConfigurationDescriptorRequest is the request used for the GetConfigurationDescriptor RPC */ +export interface GetConfigurationDescriptorRequest { +} +/** GetConfigurationDescriptorRequest is the request used for the GetConfigurationDescriptor RPC */ +export interface GetConfigurationDescriptorRequestSDKType { +} +/** GetConfigurationDescriptorResponse is the response returned by the GetConfigurationDescriptor RPC */ +export interface GetConfigurationDescriptorResponse { + /** config describes the application's sdk.Config */ + config?: ConfigurationDescriptor; +} +/** GetConfigurationDescriptorResponse is the response returned by the GetConfigurationDescriptor RPC */ +export interface GetConfigurationDescriptorResponseSDKType { + config?: ConfigurationDescriptorSDKType; +} +/** GetQueryServicesDescriptorRequest is the request used for the GetQueryServicesDescriptor RPC */ +export interface GetQueryServicesDescriptorRequest { +} +/** GetQueryServicesDescriptorRequest is the request used for the GetQueryServicesDescriptor RPC */ +export interface GetQueryServicesDescriptorRequestSDKType { +} +/** GetQueryServicesDescriptorResponse is the response returned by the GetQueryServicesDescriptor RPC */ +export interface 
GetQueryServicesDescriptorResponse { + /** queries provides information on the available queryable services */ + queries?: QueryServicesDescriptor; +} +/** GetQueryServicesDescriptorResponse is the response returned by the GetQueryServicesDescriptor RPC */ +export interface GetQueryServicesDescriptorResponseSDKType { + queries?: QueryServicesDescriptorSDKType; +} +/** GetTxDescriptorRequest is the request used for the GetTxDescriptor RPC */ +export interface GetTxDescriptorRequest { +} +/** GetTxDescriptorRequest is the request used for the GetTxDescriptor RPC */ +export interface GetTxDescriptorRequestSDKType { +} +/** GetTxDescriptorResponse is the response returned by the GetTxDescriptor RPC */ +export interface GetTxDescriptorResponse { + /** + * tx provides information on msgs that can be forwarded to the application + * alongside the accepted transaction protobuf type + */ + tx?: TxDescriptor; +} +/** GetTxDescriptorResponse is the response returned by the GetTxDescriptor RPC */ +export interface GetTxDescriptorResponseSDKType { + tx?: TxDescriptorSDKType; +} +/** QueryServicesDescriptor contains the list of cosmos-sdk queriable services */ +export interface QueryServicesDescriptor { + /** query_services is a list of cosmos-sdk QueryServiceDescriptor */ + queryServices: QueryServiceDescriptor[]; +} +/** QueryServicesDescriptor contains the list of cosmos-sdk queriable services */ +export interface QueryServicesDescriptorSDKType { + query_services: QueryServiceDescriptorSDKType[]; +} +/** QueryServiceDescriptor describes a cosmos-sdk queryable service */ +export interface QueryServiceDescriptor { + /** fullname is the protobuf fullname of the service descriptor */ + fullname: string; + /** is_module describes if this service is actually exposed by an application's module */ + isModule: boolean; + /** methods provides a list of query service methods */ + methods: QueryMethodDescriptor[]; +} +/** QueryServiceDescriptor describes a cosmos-sdk queryable service */ +export interface QueryServiceDescriptorSDKType { + fullname: string; + is_module: boolean; + methods: QueryMethodDescriptorSDKType[]; +} +/** + * QueryMethodDescriptor describes a queryable method of a query service + * no other info is provided beside method name and tendermint queryable path + * because it would be redundant with the grpc reflection service + */ +export interface QueryMethodDescriptor { + /** name is the protobuf name (not fullname) of the method */ + name: string; + /** + * full_query_path is the path that can be used to query + * this method via tendermint abci.Query + */ + fullQueryPath: string; +} +/** + * QueryMethodDescriptor describes a queryable method of a query service + * no other info is provided beside method name and tendermint queryable path + * because it would be redundant with the grpc reflection service + */ +export interface QueryMethodDescriptorSDKType { + name: string; + full_query_path: string; +} +export declare const AppDescriptor: { + encode(message: AppDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AppDescriptor; + fromPartial(object: DeepPartial): AppDescriptor; +}; +export declare const TxDescriptor: { + encode(message: TxDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxDescriptor; + fromPartial(object: DeepPartial): TxDescriptor; +}; +export declare const AuthnDescriptor: { + encode(message: AuthnDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: 
_m0.Reader | Uint8Array, length?: number): AuthnDescriptor; + fromPartial(object: DeepPartial): AuthnDescriptor; +}; +export declare const SigningModeDescriptor: { + encode(message: SigningModeDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SigningModeDescriptor; + fromPartial(object: DeepPartial): SigningModeDescriptor; +}; +export declare const ChainDescriptor: { + encode(message: ChainDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ChainDescriptor; + fromPartial(object: DeepPartial): ChainDescriptor; +}; +export declare const CodecDescriptor: { + encode(message: CodecDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CodecDescriptor; + fromPartial(object: DeepPartial): CodecDescriptor; +}; +export declare const InterfaceDescriptor: { + encode(message: InterfaceDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor; + fromPartial(object: DeepPartial): InterfaceDescriptor; +}; +export declare const InterfaceImplementerDescriptor: { + encode(message: InterfaceImplementerDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceImplementerDescriptor; + fromPartial(object: DeepPartial): InterfaceImplementerDescriptor; +}; +export declare const InterfaceAcceptingMessageDescriptor: { + encode(message: InterfaceAcceptingMessageDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceAcceptingMessageDescriptor; + fromPartial(object: DeepPartial): InterfaceAcceptingMessageDescriptor; +}; +export declare const ConfigurationDescriptor: { + encode(message: ConfigurationDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConfigurationDescriptor; + fromPartial(object: DeepPartial): ConfigurationDescriptor; +}; +export declare const MsgDescriptor: { + encode(message: MsgDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDescriptor; + fromPartial(object: DeepPartial): MsgDescriptor; +}; +export declare const GetAuthnDescriptorRequest: { + encode(_: GetAuthnDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthnDescriptorRequest; + fromPartial(_: DeepPartial): GetAuthnDescriptorRequest; +}; +export declare const GetAuthnDescriptorResponse: { + encode(message: GetAuthnDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthnDescriptorResponse; + fromPartial(object: DeepPartial): GetAuthnDescriptorResponse; +}; +export declare const GetChainDescriptorRequest: { + encode(_: GetChainDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetChainDescriptorRequest; + fromPartial(_: DeepPartial): GetChainDescriptorRequest; +}; +export declare const GetChainDescriptorResponse: { + encode(message: GetChainDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetChainDescriptorResponse; + fromPartial(object: DeepPartial): GetChainDescriptorResponse; +}; +export declare const GetCodecDescriptorRequest: { + encode(_: GetCodecDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, 
length?: number): GetCodecDescriptorRequest; + fromPartial(_: DeepPartial): GetCodecDescriptorRequest; +}; +export declare const GetCodecDescriptorResponse: { + encode(message: GetCodecDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetCodecDescriptorResponse; + fromPartial(object: DeepPartial): GetCodecDescriptorResponse; +}; +export declare const GetConfigurationDescriptorRequest: { + encode(_: GetConfigurationDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetConfigurationDescriptorRequest; + fromPartial(_: DeepPartial): GetConfigurationDescriptorRequest; +}; +export declare const GetConfigurationDescriptorResponse: { + encode(message: GetConfigurationDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetConfigurationDescriptorResponse; + fromPartial(object: DeepPartial): GetConfigurationDescriptorResponse; +}; +export declare const GetQueryServicesDescriptorRequest: { + encode(_: GetQueryServicesDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetQueryServicesDescriptorRequest; + fromPartial(_: DeepPartial): GetQueryServicesDescriptorRequest; +}; +export declare const GetQueryServicesDescriptorResponse: { + encode(message: GetQueryServicesDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetQueryServicesDescriptorResponse; + fromPartial(object: DeepPartial): GetQueryServicesDescriptorResponse; +}; +export declare const GetTxDescriptorRequest: { + encode(_: GetTxDescriptorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxDescriptorRequest; + fromPartial(_: DeepPartial): GetTxDescriptorRequest; +}; +export declare const GetTxDescriptorResponse: { + encode(message: GetTxDescriptorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxDescriptorResponse; + fromPartial(object: DeepPartial): GetTxDescriptorResponse; +}; +export declare const QueryServicesDescriptor: { + encode(message: QueryServicesDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryServicesDescriptor; + fromPartial(object: DeepPartial): QueryServicesDescriptor; +}; +export declare const QueryServiceDescriptor: { + encode(message: QueryServiceDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryServiceDescriptor; + fromPartial(object: DeepPartial): QueryServiceDescriptor; +}; +export declare const QueryMethodDescriptor: { + encode(message: QueryMethodDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMethodDescriptor; + fromPartial(object: DeepPartial): QueryMethodDescriptor; +}; diff --git a/packages/codegen/dist/cosmos/base/snapshots/v1beta1/snapshot.d.ts b/packages/codegen/dist/cosmos/base/snapshots/v1beta1/snapshot.d.ts new file mode 100644 index 00000000..a771a8bf --- /dev/null +++ b/packages/codegen/dist/cosmos/base/snapshots/v1beta1/snapshot.d.ts @@ -0,0 +1,151 @@ +/// +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** Snapshot contains Tendermint state sync snapshot info. 
*/ +export interface Snapshot { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata?: Metadata; +} +/** Snapshot contains Tendermint state sync snapshot info. */ +export interface SnapshotSDKType { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata?: MetadataSDKType; +} +/** Metadata contains SDK-specific snapshot metadata. */ +export interface Metadata { + /** SHA-256 chunk hashes */ + chunkHashes: Uint8Array[]; +} +/** Metadata contains SDK-specific snapshot metadata. */ +export interface MetadataSDKType { + chunk_hashes: Uint8Array[]; +} +/** SnapshotItem is an item contained in a rootmulti.Store snapshot. */ +export interface SnapshotItem { + store?: SnapshotStoreItem; + iavl?: SnapshotIAVLItem; + extension?: SnapshotExtensionMeta; + extensionPayload?: SnapshotExtensionPayload; + kv?: SnapshotKVItem; + schema?: SnapshotSchema; +} +/** SnapshotItem is an item contained in a rootmulti.Store snapshot. */ +export interface SnapshotItemSDKType { + store?: SnapshotStoreItemSDKType; + iavl?: SnapshotIAVLItemSDKType; + extension?: SnapshotExtensionMetaSDKType; + extension_payload?: SnapshotExtensionPayloadSDKType; + kv?: SnapshotKVItemSDKType; + schema?: SnapshotSchemaSDKType; +} +/** SnapshotStoreItem contains metadata about a snapshotted store. */ +export interface SnapshotStoreItem { + name: string; +} +/** SnapshotStoreItem contains metadata about a snapshotted store. */ +export interface SnapshotStoreItemSDKType { + name: string; +} +/** SnapshotIAVLItem is an exported IAVL node. */ +export interface SnapshotIAVLItem { + key: Uint8Array; + value: Uint8Array; + /** version is block height */ + version: Long; + /** height is depth of the tree. */ + height: number; +} +/** SnapshotIAVLItem is an exported IAVL node. */ +export interface SnapshotIAVLItemSDKType { + key: Uint8Array; + value: Uint8Array; + version: Long; + height: number; +} +/** SnapshotExtensionMeta contains metadata about an external snapshotter. */ +export interface SnapshotExtensionMeta { + name: string; + format: number; +} +/** SnapshotExtensionMeta contains metadata about an external snapshotter. */ +export interface SnapshotExtensionMetaSDKType { + name: string; + format: number; +} +/** SnapshotExtensionPayload contains payloads of an external snapshotter. */ +export interface SnapshotExtensionPayload { + payload: Uint8Array; +} +/** SnapshotExtensionPayload contains payloads of an external snapshotter. 
*/ +export interface SnapshotExtensionPayloadSDKType { + payload: Uint8Array; +} +/** SnapshotKVItem is an exported Key/Value Pair */ +export interface SnapshotKVItem { + key: Uint8Array; + value: Uint8Array; +} +/** SnapshotKVItem is an exported Key/Value Pair */ +export interface SnapshotKVItemSDKType { + key: Uint8Array; + value: Uint8Array; +} +/** SnapshotSchema is an exported schema of smt store */ +export interface SnapshotSchema { + keys: Uint8Array[]; +} +/** SnapshotSchema is an exported schema of smt store */ +export interface SnapshotSchemaSDKType { + keys: Uint8Array[]; +} +export declare const Snapshot: { + encode(message: Snapshot, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Snapshot; + fromPartial(object: DeepPartial): Snapshot; +}; +export declare const Metadata: { + encode(message: Metadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Metadata; + fromPartial(object: DeepPartial): Metadata; +}; +export declare const SnapshotItem: { + encode(message: SnapshotItem, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotItem; + fromPartial(object: DeepPartial): SnapshotItem; +}; +export declare const SnapshotStoreItem: { + encode(message: SnapshotStoreItem, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotStoreItem; + fromPartial(object: DeepPartial): SnapshotStoreItem; +}; +export declare const SnapshotIAVLItem: { + encode(message: SnapshotIAVLItem, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotIAVLItem; + fromPartial(object: DeepPartial): SnapshotIAVLItem; +}; +export declare const SnapshotExtensionMeta: { + encode(message: SnapshotExtensionMeta, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotExtensionMeta; + fromPartial(object: DeepPartial): SnapshotExtensionMeta; +}; +export declare const SnapshotExtensionPayload: { + encode(message: SnapshotExtensionPayload, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotExtensionPayload; + fromPartial(object: DeepPartial): SnapshotExtensionPayload; +}; +export declare const SnapshotKVItem: { + encode(message: SnapshotKVItem, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotKVItem; + fromPartial(object: DeepPartial): SnapshotKVItem; +}; +export declare const SnapshotSchema: { + encode(message: SnapshotSchema, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotSchema; + fromPartial(object: DeepPartial): SnapshotSchema; +}; diff --git a/packages/codegen/dist/cosmos/base/store/v1beta1/commit_info.d.ts b/packages/codegen/dist/cosmos/base/store/v1beta1/commit_info.d.ts new file mode 100644 index 00000000..6870078b --- /dev/null +++ b/packages/codegen/dist/cosmos/base/store/v1beta1/commit_info.d.ts @@ -0,0 +1,66 @@ +/// +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * CommitInfo defines commit information used by the multi-store when committing + * a version/height. + */ +export interface CommitInfo { + version: Long; + storeInfos: StoreInfo[]; +} +/** + * CommitInfo defines commit information used by the multi-store when committing + * a version/height. 
+ */ +export interface CommitInfoSDKType { + version: Long; + store_infos: StoreInfoSDKType[]; +} +/** + * StoreInfo defines store-specific commit information. It contains a reference + * between a store name and the commit ID. + */ +export interface StoreInfo { + name: string; + commitId?: CommitID; +} +/** + * StoreInfo defines store-specific commit information. It contains a reference + * between a store name and the commit ID. + */ +export interface StoreInfoSDKType { + name: string; + commit_id?: CommitIDSDKType; +} +/** + * CommitID defines the committment information when a specific store is + * committed. + */ +export interface CommitID { + version: Long; + hash: Uint8Array; +} +/** + * CommitID defines the committment information when a specific store is + * committed. + */ +export interface CommitIDSDKType { + version: Long; + hash: Uint8Array; +} +export declare const CommitInfo: { + encode(message: CommitInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommitInfo; + fromPartial(object: DeepPartial): CommitInfo; +}; +export declare const StoreInfo: { + encode(message: StoreInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StoreInfo; + fromPartial(object: DeepPartial): StoreInfo; +}; +export declare const CommitID: { + encode(message: CommitID, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommitID; + fromPartial(object: DeepPartial): CommitID; +}; diff --git a/packages/codegen/dist/cosmos/base/store/v1beta1/listening.d.ts b/packages/codegen/dist/cosmos/base/store/v1beta1/listening.d.ts new file mode 100644 index 00000000..463f289b --- /dev/null +++ b/packages/codegen/dist/cosmos/base/store/v1beta1/listening.d.ts @@ -0,0 +1,35 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * StoreKVPair is a KVStore KVPair used for listening to state changes (Sets and Deletes) + * It optionally includes the StoreKey for the originating KVStore and a Boolean flag to distinguish between Sets and + * Deletes + * + * Since: cosmos-sdk 0.43 + */ +export interface StoreKVPair { + /** the store key for the KVStore this pair originates from */ + storeKey: string; + /** true indicates a delete operation, false indicates a set operation */ + delete: boolean; + key: Uint8Array; + value: Uint8Array; +} +/** + * StoreKVPair is a KVStore KVPair used for listening to state changes (Sets and Deletes) + * It optionally includes the StoreKey for the originating KVStore and a Boolean flag to distinguish between Sets and + * Deletes + * + * Since: cosmos-sdk 0.43 + */ +export interface StoreKVPairSDKType { + store_key: string; + delete: boolean; + key: Uint8Array; + value: Uint8Array; +} +export declare const StoreKVPair: { + encode(message: StoreKVPair, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StoreKVPair; + fromPartial(object: DeepPartial): StoreKVPair; +}; diff --git a/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.d.ts new file mode 100644 index 00000000..c5c1d545 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.d.ts @@ -0,0 +1,245 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { BlockID, 
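// Illustrative sketch, not generated code: consuming state-listening output with the
// StoreKVPair codec declared above. Where the framed bytes come from (file streamer,
// plugin, ...) is outside this diff; the hex helper is local to the example.
import { StoreKVPair } from "./cosmos/base/store/v1beta1/listening";

const toHex = (b: Uint8Array): string =>
  Array.from(b, (x) => x.toString(16).padStart(2, "0")).join("");

function applyStateChange(bytes: Uint8Array, state: Map<string, Uint8Array>): void {
  const pair = StoreKVPair.decode(bytes);
  const mapKey = `${pair.storeKey}/${toHex(pair.key)}`;
  if (pair.delete) {
    state.delete(mapKey); // delete === true marks a KVStore delete
  } else {
    state.set(mapKey, pair.value); // otherwise the pair records a set
  }
}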
BlockIDSDKType } from "../../../../tendermint/types/types"; +import { Block, BlockSDKType } from "../../../../tendermint/types/block"; +import { NodeInfo, NodeInfoSDKType } from "../../../../tendermint/p2p/types"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetValidatorSetByHeightRequest { + height: Long; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetValidatorSetByHeightRequestSDKType { + height: Long; + pagination?: PageRequestSDKType; +} +/** GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetValidatorSetByHeightResponse { + blockHeight: Long; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetValidatorSetByHeightResponseSDKType { + block_height: Long; + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetRequest { + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetRequestSDKType { + pagination?: PageRequestSDKType; +} +/** GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetResponse { + blockHeight: Long; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetResponseSDKType { + block_height: Long; + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** Validator is the type for the validator-set. */ +export interface Validator { + address: string; + pubKey?: Any; + votingPower: Long; + proposerPriority: Long; +} +/** Validator is the type for the validator-set. */ +export interface ValidatorSDKType { + address: string; + pub_key?: AnySDKType; + voting_power: Long; + proposer_priority: Long; +} +/** GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightRequest { + height: Long; +} +/** GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightRequestSDKType { + height: Long; +} +/** GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightResponse { + blockId?: BlockID; + block?: Block; +} +/** GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightResponseSDKType { + block_id?: BlockIDSDKType; + block?: BlockSDKType; +} +/** GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. 
*/ +export interface GetLatestBlockRequest { +} +/** GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. */ +export interface GetLatestBlockRequestSDKType { +} +/** GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. */ +export interface GetLatestBlockResponse { + blockId?: BlockID; + block?: Block; +} +/** GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. */ +export interface GetLatestBlockResponseSDKType { + block_id?: BlockIDSDKType; + block?: BlockSDKType; +} +/** GetSyncingRequest is the request type for the Query/GetSyncing RPC method. */ +export interface GetSyncingRequest { +} +/** GetSyncingRequest is the request type for the Query/GetSyncing RPC method. */ +export interface GetSyncingRequestSDKType { +} +/** GetSyncingResponse is the response type for the Query/GetSyncing RPC method. */ +export interface GetSyncingResponse { + syncing: boolean; +} +/** GetSyncingResponse is the response type for the Query/GetSyncing RPC method. */ +export interface GetSyncingResponseSDKType { + syncing: boolean; +} +/** GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoRequest { +} +/** GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoRequestSDKType { +} +/** GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoResponse { + nodeInfo?: NodeInfo; + applicationVersion?: VersionInfo; +} +/** GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoResponseSDKType { + node_info?: NodeInfoSDKType; + application_version?: VersionInfoSDKType; +} +/** VersionInfo is the type for the GetNodeInfoResponse message. */ +export interface VersionInfo { + name: string; + appName: string; + version: string; + gitCommit: string; + buildTags: string; + goVersion: string; + buildDeps: Module[]; + /** Since: cosmos-sdk 0.43 */ + cosmosSdkVersion: string; +} +/** VersionInfo is the type for the GetNodeInfoResponse message. 
*/ +export interface VersionInfoSDKType { + name: string; + app_name: string; + version: string; + git_commit: string; + build_tags: string; + go_version: string; + build_deps: ModuleSDKType[]; + cosmos_sdk_version: string; +} +/** Module is the type for VersionInfo */ +export interface Module { + /** module path */ + path: string; + /** module version */ + version: string; + /** checksum */ + sum: string; +} +/** Module is the type for VersionInfo */ +export interface ModuleSDKType { + path: string; + version: string; + sum: string; +} +export declare const GetValidatorSetByHeightRequest: { + encode(message: GetValidatorSetByHeightRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightRequest; + fromPartial(object: DeepPartial): GetValidatorSetByHeightRequest; +}; +export declare const GetValidatorSetByHeightResponse: { + encode(message: GetValidatorSetByHeightResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightResponse; + fromPartial(object: DeepPartial): GetValidatorSetByHeightResponse; +}; +export declare const GetLatestValidatorSetRequest: { + encode(message: GetLatestValidatorSetRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetRequest; + fromPartial(object: DeepPartial): GetLatestValidatorSetRequest; +}; +export declare const GetLatestValidatorSetResponse: { + encode(message: GetLatestValidatorSetResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetResponse; + fromPartial(object: DeepPartial): GetLatestValidatorSetResponse; +}; +export declare const Validator: { + encode(message: Validator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Validator; + fromPartial(object: DeepPartial): Validator; +}; +export declare const GetBlockByHeightRequest: { + encode(message: GetBlockByHeightRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightRequest; + fromPartial(object: DeepPartial): GetBlockByHeightRequest; +}; +export declare const GetBlockByHeightResponse: { + encode(message: GetBlockByHeightResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightResponse; + fromPartial(object: DeepPartial): GetBlockByHeightResponse; +}; +export declare const GetLatestBlockRequest: { + encode(_: GetLatestBlockRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockRequest; + fromPartial(_: DeepPartial): GetLatestBlockRequest; +}; +export declare const GetLatestBlockResponse: { + encode(message: GetLatestBlockResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockResponse; + fromPartial(object: DeepPartial): GetLatestBlockResponse; +}; +export declare const GetSyncingRequest: { + encode(_: GetSyncingRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingRequest; + fromPartial(_: DeepPartial): GetSyncingRequest; +}; +export declare const GetSyncingResponse: { + encode(message: GetSyncingResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingResponse; + fromPartial(object: DeepPartial): GetSyncingResponse; +}; +export 
declare const GetNodeInfoRequest: { + encode(_: GetNodeInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoRequest; + fromPartial(_: DeepPartial): GetNodeInfoRequest; +}; +export declare const GetNodeInfoResponse: { + encode(message: GetNodeInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoResponse; + fromPartial(object: DeepPartial): GetNodeInfoResponse; +}; +export declare const VersionInfo: { + encode(message: VersionInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): VersionInfo; + fromPartial(object: DeepPartial): VersionInfo; +}; +export declare const Module: { + encode(message: Module, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Module; + fromPartial(object: DeepPartial): Module; +}; diff --git a/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..2baf7f20 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.lcd.d.ts @@ -0,0 +1,14 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { GetNodeInfoRequest, GetNodeInfoResponseSDKType, GetSyncingRequest, GetSyncingResponseSDKType, GetLatestBlockRequest, GetLatestBlockResponseSDKType, GetBlockByHeightRequest, GetBlockByHeightResponseSDKType, GetLatestValidatorSetRequest, GetLatestValidatorSetResponseSDKType, GetValidatorSetByHeightRequest, GetValidatorSetByHeightResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + getNodeInfo(_params?: GetNodeInfoRequest): Promise; + getSyncing(_params?: GetSyncingRequest): Promise; + getLatestBlock(_params?: GetLatestBlockRequest): Promise; + getBlockByHeight(params: GetBlockByHeightRequest): Promise; + getLatestValidatorSet(params?: GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(params: GetValidatorSetByHeightRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.rpc.Service.d.ts b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.rpc.Service.d.ts new file mode 100644 index 00000000..e1584b9d --- /dev/null +++ b/packages/codegen/dist/cosmos/base/tendermint/v1beta1/query.rpc.Service.d.ts @@ -0,0 +1,36 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { GetNodeInfoRequest, GetNodeInfoResponse, GetSyncingRequest, GetSyncingResponse, GetLatestBlockRequest, GetLatestBlockResponse, GetBlockByHeightRequest, GetBlockByHeightResponse, GetLatestValidatorSetRequest, GetLatestValidatorSetResponse, GetValidatorSetByHeightRequest, GetValidatorSetByHeightResponse } from "./query"; +/** Service defines the gRPC querier service for tendermint queries. */ +export interface Service { + /** GetNodeInfo queries the current node info. */ + getNodeInfo(request?: GetNodeInfoRequest): Promise; + /** GetSyncing queries node syncing. */ + getSyncing(request?: GetSyncingRequest): Promise; + /** GetLatestBlock returns the latest block. */ + getLatestBlock(request?: GetLatestBlockRequest): Promise; + /** GetBlockByHeight queries block for given height. */ + getBlockByHeight(request: GetBlockByHeightRequest): Promise; + /** GetLatestValidatorSet queries latest validator-set. 
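// Illustrative sketch, not generated code: driving the LCDQueryClient declared above
// against a REST endpoint. The { requestClient } constructor option and the
// @osmonauts/lcd import come from the declarations; the restEndpoint option name
// follows common Telescope examples and the URL is a placeholder.
import Long from "long";
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./cosmos/base/tendermint/v1beta1/query.lcd";

async function queryNodeOverLcd() {
  const requestClient = new LCDClient({ restEndpoint: "http://localhost:1317" });
  const tendermint = new LCDQueryClient({ requestClient });

  const nodeInfo = await tendermint.getNodeInfo();
  const latestBlock = await tendermint.getLatestBlock();
  const validators = await tendermint.getValidatorSetByHeight({ height: Long.fromNumber(1) });
  return { nodeInfo, latestBlock, validators };
}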
*/ + getLatestValidatorSet(request?: GetLatestValidatorSetRequest): Promise; + /** GetValidatorSetByHeight queries validator-set at a given height. */ + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise; +} +export declare class ServiceClientImpl implements Service { + private readonly rpc; + constructor(rpc: Rpc); + getNodeInfo(request?: GetNodeInfoRequest): Promise; + getSyncing(request?: GetSyncingRequest): Promise; + getLatestBlock(request?: GetLatestBlockRequest): Promise; + getBlockByHeight(request: GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + getNodeInfo(request?: GetNodeInfoRequest): Promise; + getSyncing(request?: GetSyncingRequest): Promise; + getLatestBlock(request?: GetLatestBlockRequest): Promise; + getBlockByHeight(request: GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/base/v1beta1/coin.d.ts b/packages/codegen/dist/cosmos/base/v1beta1/coin.d.ts new file mode 100644 index 00000000..8fb1fcd3 --- /dev/null +++ b/packages/codegen/dist/cosmos/base/v1beta1/coin.d.ts @@ -0,0 +1,78 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface Coin { + denom: string; + amount: string; +} +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface CoinSDKType { + denom: string; + amount: string; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoin { + denom: string; + amount: string; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoinSDKType { + denom: string; + amount: string; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProto { + int: string; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProtoSDKType { + int: string; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ +export interface DecProto { + dec: string; +} +/** DecProto defines a Protobuf wrapper around a Dec object. 
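// Illustrative sketch, not generated code: wiring the RPC service through
// @cosmjs/stargate using the createRpcQueryExtension declared above. The Tendermint
// RPC URL is a placeholder, and @cosmjs/tendermint-rpc is assumed to be installed
// alongside @cosmjs/stargate.
import { QueryClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { createRpcQueryExtension } from "./cosmos/base/tendermint/v1beta1/query.rpc.Service";

async function queryNodeOverRpc() {
  const tmClient = await Tendermint34Client.connect("http://localhost:26657");
  const queryClient = QueryClient.withExtensions(tmClient, createRpcQueryExtension);

  const syncing = await queryClient.getSyncing();    // GetSyncingResponse
  const latest = await queryClient.getLatestBlock(); // GetLatestBlockResponse
  return { syncing: syncing.syncing, blockId: latest.blockId };
}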
*/ +export interface DecProtoSDKType { + dec: string; +} +export declare const Coin: { + encode(message: Coin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Coin; + fromPartial(object: DeepPartial): Coin; +}; +export declare const DecCoin: { + encode(message: DecCoin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DecCoin; + fromPartial(object: DeepPartial): DecCoin; +}; +export declare const IntProto: { + encode(message: IntProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): IntProto; + fromPartial(object: DeepPartial): IntProto; +}; +export declare const DecProto: { + encode(message: DecProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DecProto; + fromPartial(object: DeepPartial): DecProto; +}; diff --git a/packages/codegen/dist/cosmos/bundle.d.ts b/packages/codegen/dist/cosmos/bundle.d.ts new file mode 100644 index 00000000..8769f2e4 --- /dev/null +++ b/packages/codegen/dist/cosmos/bundle.d.ts @@ -0,0 +1,8995 @@ +/// +import * as _3 from "./app/v1alpha1/config"; +import * as _4 from "./app/v1alpha1/module"; +import * as _5 from "./app/v1alpha1/query"; +import * as _6 from "./auth/v1beta1/auth"; +import * as _7 from "./auth/v1beta1/genesis"; +import * as _8 from "./auth/v1beta1/query"; +import * as _9 from "./authz/v1beta1/authz"; +import * as _10 from "./authz/v1beta1/event"; +import * as _11 from "./authz/v1beta1/genesis"; +import * as _12 from "./authz/v1beta1/query"; +import * as _13 from "./authz/v1beta1/tx"; +import * as _14 from "./bank/v1beta1/authz"; +import * as _15 from "./bank/v1beta1/bank"; +import * as _16 from "./bank/v1beta1/genesis"; +import * as _17 from "./bank/v1beta1/query"; +import * as _18 from "./bank/v1beta1/tx"; +import * as _19 from "./base/abci/v1beta1/abci"; +import * as _20 from "./base/kv/v1beta1/kv"; +import * as _21 from "./base/query/v1beta1/pagination"; +import * as _22 from "./base/reflection/v1beta1/reflection"; +import * as _23 from "./base/reflection/v2alpha1/reflection"; +import * as _24 from "./base/snapshots/v1beta1/snapshot"; +import * as _25 from "./base/store/v1beta1/commit_info"; +import * as _26 from "./base/store/v1beta1/listening"; +import * as _27 from "./base/tendermint/v1beta1/query"; +import * as _28 from "./base/v1beta1/coin"; +import * as _29 from "./capability/v1beta1/capability"; +import * as _30 from "./capability/v1beta1/genesis"; +import * as _31 from "./crisis/v1beta1/genesis"; +import * as _32 from "./crisis/v1beta1/tx"; +import * as _33 from "./crypto/ed25519/keys"; +import * as _34 from "./crypto/hd/v1/hd"; +import * as _35 from "./crypto/keyring/v1/record"; +import * as _36 from "./crypto/multisig/keys"; +import * as _37 from "./crypto/secp256k1/keys"; +import * as _38 from "./crypto/secp256r1/keys"; +import * as _39 from "./distribution/v1beta1/distribution"; +import * as _40 from "./distribution/v1beta1/genesis"; +import * as _41 from "./distribution/v1beta1/query"; +import * as _42 from "./distribution/v1beta1/tx"; +import * as _43 from "./evidence/v1beta1/evidence"; +import * as _44 from "./evidence/v1beta1/genesis"; +import * as _45 from "./evidence/v1beta1/query"; +import * as _46 from "./evidence/v1beta1/tx"; +import * as _47 from "./feegrant/v1beta1/feegrant"; +import * as _48 from "./feegrant/v1beta1/genesis"; +import * as _49 from "./feegrant/v1beta1/query"; +import * as _50 from "./feegrant/v1beta1/tx"; +import * as _51 from 
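// Illustrative sketch, not generated code: the Coin codec declared above. Amounts are
// strings because the underlying field is a gogoproto Int; the denom is a placeholder.
import { Coin } from "./cosmos/base/v1beta1/coin";

const fee: Coin = { denom: "uatom", amount: "2500" };
const feeBytes = Coin.encode(fee).finish();
const restored = Coin.decode(feeBytes); // { denom: "uatom", amount: "2500" }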
"./genutil/v1beta1/genesis"; +import * as _52 from "./gov/v1/genesis"; +import * as _53 from "./gov/v1/gov"; +import * as _54 from "./gov/v1/query"; +import * as _55 from "./gov/v1/tx"; +import * as _56 from "./gov/v1beta1/genesis"; +import * as _57 from "./gov/v1beta1/gov"; +import * as _58 from "./gov/v1beta1/query"; +import * as _59 from "./gov/v1beta1/tx"; +import * as _60 from "./group/v1/events"; +import * as _61 from "./group/v1/genesis"; +import * as _62 from "./group/v1/query"; +import * as _63 from "./group/v1/tx"; +import * as _64 from "./group/v1/types"; +import * as _65 from "./mint/v1beta1/genesis"; +import * as _66 from "./mint/v1beta1/mint"; +import * as _67 from "./mint/v1beta1/query"; +import * as _69 from "./nft/v1beta1/event"; +import * as _70 from "./nft/v1beta1/genesis"; +import * as _71 from "./nft/v1beta1/nft"; +import * as _72 from "./nft/v1beta1/query"; +import * as _73 from "./nft/v1beta1/tx"; +import * as _74 from "./orm/v1/orm"; +import * as _75 from "./orm/v1alpha1/schema"; +import * as _76 from "./params/v1beta1/params"; +import * as _77 from "./params/v1beta1/query"; +import * as _78 from "./slashing/v1beta1/genesis"; +import * as _79 from "./slashing/v1beta1/query"; +import * as _80 from "./slashing/v1beta1/slashing"; +import * as _81 from "./slashing/v1beta1/tx"; +import * as _82 from "./staking/v1beta1/authz"; +import * as _83 from "./staking/v1beta1/genesis"; +import * as _84 from "./staking/v1beta1/query"; +import * as _85 from "./staking/v1beta1/staking"; +import * as _86 from "./staking/v1beta1/tx"; +import * as _87 from "./tx/signing/v1beta1/signing"; +import * as _88 from "./tx/v1beta1/service"; +import * as _89 from "./tx/v1beta1/tx"; +import * as _90 from "./upgrade/v1beta1/query"; +import * as _91 from "./upgrade/v1beta1/tx"; +import * as _92 from "./upgrade/v1beta1/upgrade"; +import * as _93 from "./vesting/v1beta1/tx"; +import * as _94 from "./vesting/v1beta1/vesting"; +import * as _145 from "./auth/v1beta1/query.lcd"; +import * as _146 from "./authz/v1beta1/query.lcd"; +import * as _147 from "./bank/v1beta1/query.lcd"; +import * as _148 from "./base/tendermint/v1beta1/query.lcd"; +import * as _149 from "./distribution/v1beta1/query.lcd"; +import * as _150 from "./evidence/v1beta1/query.lcd"; +import * as _151 from "./feegrant/v1beta1/query.lcd"; +import * as _152 from "./gov/v1/query.lcd"; +import * as _153 from "./gov/v1beta1/query.lcd"; +import * as _154 from "./group/v1/query.lcd"; +import * as _155 from "./mint/v1beta1/query.lcd"; +import * as _156 from "./nft/v1beta1/query.lcd"; +import * as _157 from "./params/v1beta1/query.lcd"; +import * as _158 from "./slashing/v1beta1/query.lcd"; +import * as _159 from "./staking/v1beta1/query.lcd"; +import * as _160 from "./tx/v1beta1/service.lcd"; +import * as _161 from "./upgrade/v1beta1/query.lcd"; +import * as _162 from "./app/v1alpha1/query.rpc.Query"; +import * as _163 from "./auth/v1beta1/query.rpc.Query"; +import * as _164 from "./authz/v1beta1/query.rpc.Query"; +import * as _165 from "./bank/v1beta1/query.rpc.Query"; +import * as _166 from "./base/tendermint/v1beta1/query.rpc.Service"; +import * as _167 from "./distribution/v1beta1/query.rpc.Query"; +import * as _168 from "./evidence/v1beta1/query.rpc.Query"; +import * as _169 from "./feegrant/v1beta1/query.rpc.Query"; +import * as _170 from "./gov/v1/query.rpc.Query"; +import * as _171 from "./gov/v1beta1/query.rpc.Query"; +import * as _172 from "./group/v1/query.rpc.Query"; +import * as _173 from "./mint/v1beta1/query.rpc.Query"; +import 
* as _174 from "./nft/v1beta1/query.rpc.Query"; +import * as _175 from "./params/v1beta1/query.rpc.Query"; +import * as _176 from "./slashing/v1beta1/query.rpc.Query"; +import * as _177 from "./staking/v1beta1/query.rpc.Query"; +import * as _178 from "./tx/v1beta1/service.rpc.Service"; +import * as _179 from "./upgrade/v1beta1/query.rpc.Query"; +import * as _180 from "./authz/v1beta1/tx.rpc.msg"; +import * as _181 from "./bank/v1beta1/tx.rpc.msg"; +import * as _182 from "./crisis/v1beta1/tx.rpc.msg"; +import * as _183 from "./distribution/v1beta1/tx.rpc.msg"; +import * as _184 from "./evidence/v1beta1/tx.rpc.msg"; +import * as _185 from "./feegrant/v1beta1/tx.rpc.msg"; +import * as _186 from "./gov/v1/tx.rpc.msg"; +import * as _187 from "./gov/v1beta1/tx.rpc.msg"; +import * as _188 from "./group/v1/tx.rpc.msg"; +import * as _189 from "./nft/v1beta1/tx.rpc.msg"; +import * as _190 from "./slashing/v1beta1/tx.rpc.msg"; +import * as _191 from "./staking/v1beta1/tx.rpc.msg"; +import * as _192 from "./upgrade/v1beta1/tx.rpc.msg"; +import * as _193 from "./vesting/v1beta1/tx.rpc.msg"; +export declare namespace cosmos { + namespace app { + const v1alpha1: { + QueryClientImpl: typeof _162.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + config(request?: _5.QueryConfigRequest): Promise<_5.QueryConfigResponse>; + }; + QueryConfigRequest: { + encode(_: _5.QueryConfigRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _5.QueryConfigRequest; + fromPartial(_: {}): _5.QueryConfigRequest; + }; + QueryConfigResponse: { + encode(message: _5.QueryConfigResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _5.QueryConfigResponse; + fromPartial(object: { + config?: { + modules?: { + name?: string; + config?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }; + }): _5.QueryConfigResponse; + }; + ModuleDescriptor: { + encode(message: _4.ModuleDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _4.ModuleDescriptor; + fromPartial(object: { + goImport?: string; + usePackage?: { + name?: string; + revision?: number; + }[]; + canMigrateFrom?: { + module?: string; + }[]; + }): _4.ModuleDescriptor; + }; + PackageReference: { + encode(message: _4.PackageReference, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _4.PackageReference; + fromPartial(object: { + name?: string; + revision?: number; + }): _4.PackageReference; + }; + MigrateFromInfo: { + encode(message: _4.MigrateFromInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _4.MigrateFromInfo; + fromPartial(object: { + module?: string; + }): _4.MigrateFromInfo; + }; + Config: { + encode(message: _3.Config, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _3.Config; + fromPartial(object: { + modules?: { + name?: string; + config?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _3.Config; + }; + ModuleConfig: { + encode(message: _3.ModuleConfig, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + 
decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _3.ModuleConfig; + fromPartial(object: { + name?: string; + config?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _3.ModuleConfig; + }; + }; + } + namespace auth { + const v1beta1: { + QueryClientImpl: typeof _163.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + accounts(request?: _8.QueryAccountsRequest): Promise<_8.QueryAccountsResponse>; + account(request: _8.QueryAccountRequest): Promise<_8.QueryAccountResponse>; + params(request?: _8.QueryParamsRequest): Promise<_8.QueryParamsResponse>; + moduleAccounts(request?: _8.QueryModuleAccountsRequest): Promise<_8.QueryModuleAccountsResponse>; + bech32Prefix(request?: _8.Bech32PrefixRequest): Promise<_8.Bech32PrefixResponse>; + addressBytesToString(request: _8.AddressBytesToStringRequest): Promise<_8.AddressBytesToStringResponse>; + addressStringToBytes(request: _8.AddressStringToBytesRequest): Promise<_8.AddressStringToBytesResponse>; + }; + LCDQueryClient: typeof _145.LCDQueryClient; + QueryAccountsRequest: { + encode(message: _8.QueryAccountsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryAccountsRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _8.QueryAccountsRequest; + }; + QueryAccountsResponse: { + encode(message: _8.QueryAccountsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryAccountsResponse; + fromPartial(object: { + accounts?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _8.QueryAccountsResponse; + }; + QueryAccountRequest: { + encode(message: _8.QueryAccountRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryAccountRequest; + fromPartial(object: { + address?: string; + }): _8.QueryAccountRequest; + }; + QueryModuleAccountsRequest: { + encode(_: _8.QueryModuleAccountsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryModuleAccountsRequest; + fromPartial(_: {}): _8.QueryModuleAccountsRequest; + }; + QueryParamsResponse: { + encode(message: _8.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryParamsResponse; + fromPartial(object: { + params?: { + maxMemoCharacters?: string | number | import("long").Long; + txSigLimit?: string | number | import("long").Long; + txSizeCostPerByte?: string | number | import("long").Long; + sigVerifyCostEd25519?: string | number | import("long").Long; + sigVerifyCostSecp256k1?: string | number | import("long").Long; + }; + }): _8.QueryParamsResponse; + }; + QueryAccountResponse: { + encode(message: _8.QueryAccountResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryAccountResponse; + fromPartial(object: { + account?: { + typeUrl?: 
string; + value?: Uint8Array; + }; + }): _8.QueryAccountResponse; + }; + QueryParamsRequest: { + encode(_: _8.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryParamsRequest; + fromPartial(_: {}): _8.QueryParamsRequest; + }; + QueryModuleAccountsResponse: { + encode(message: _8.QueryModuleAccountsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.QueryModuleAccountsResponse; + fromPartial(object: { + accounts?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _8.QueryModuleAccountsResponse; + }; + Bech32PrefixRequest: { + encode(_: _8.Bech32PrefixRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.Bech32PrefixRequest; + fromPartial(_: {}): _8.Bech32PrefixRequest; + }; + Bech32PrefixResponse: { + encode(message: _8.Bech32PrefixResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.Bech32PrefixResponse; + fromPartial(object: { + bech32Prefix?: string; + }): _8.Bech32PrefixResponse; + }; + AddressBytesToStringRequest: { + encode(message: _8.AddressBytesToStringRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.AddressBytesToStringRequest; + fromPartial(object: { + addressBytes?: Uint8Array; + }): _8.AddressBytesToStringRequest; + }; + AddressBytesToStringResponse: { + encode(message: _8.AddressBytesToStringResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.AddressBytesToStringResponse; + fromPartial(object: { + addressString?: string; + }): _8.AddressBytesToStringResponse; + }; + AddressStringToBytesRequest: { + encode(message: _8.AddressStringToBytesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.AddressStringToBytesRequest; + fromPartial(object: { + addressString?: string; + }): _8.AddressStringToBytesRequest; + }; + AddressStringToBytesResponse: { + encode(message: _8.AddressStringToBytesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _8.AddressStringToBytesResponse; + fromPartial(object: { + addressBytes?: Uint8Array; + }): _8.AddressStringToBytesResponse; + }; + GenesisState: { + encode(message: _7.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _7.GenesisState; + fromPartial(object: { + params?: { + maxMemoCharacters?: string | number | import("long").Long; + txSigLimit?: string | number | import("long").Long; + txSizeCostPerByte?: string | number | import("long").Long; + sigVerifyCostEd25519?: string | number | import("long").Long; + sigVerifyCostSecp256k1?: string | number | import("long").Long; + }; + accounts?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _7.GenesisState; + }; + BaseAccount: { + encode(message: _6.BaseAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: 
Uint8Array | import("protobufjs").Reader, length?: number): _6.BaseAccount; + fromPartial(object: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }): _6.BaseAccount; + }; + ModuleAccount: { + encode(message: _6.ModuleAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _6.ModuleAccount; + fromPartial(object: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + name?: string; + permissions?: string[]; + }): _6.ModuleAccount; + }; + Params: { + encode(message: _6.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _6.Params; + fromPartial(object: { + maxMemoCharacters?: string | number | import("long").Long; + txSigLimit?: string | number | import("long").Long; + txSizeCostPerByte?: string | number | import("long").Long; + sigVerifyCostEd25519?: string | number | import("long").Long; + sigVerifyCostSecp256k1?: string | number | import("long").Long; + }): _6.Params; + }; + }; + } + namespace authz { + const v1beta1: { + MsgClientImpl: typeof _180.MsgClientImpl; + QueryClientImpl: typeof _164.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + grants(request: _12.QueryGrantsRequest): Promise<_12.QueryGrantsResponse>; + granterGrants(request: _12.QueryGranterGrantsRequest): Promise<_12.QueryGranterGrantsResponse>; + granteeGrants(request: _12.QueryGranteeGrantsRequest): Promise<_12.QueryGranteeGrantsResponse>; + }; + LCDQueryClient: typeof _146.LCDQueryClient; + MsgGrant: { + encode(message: _13.MsgGrant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgGrant; + fromPartial(object: { + granter?: string; + grantee?: string; + grant?: { + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }; + }): _13.MsgGrant; + }; + MsgExecResponse: { + encode(message: _13.MsgExecResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgExecResponse; + fromPartial(object: { + results?: Uint8Array[]; + }): _13.MsgExecResponse; + }; + MsgExec: { + encode(message: _13.MsgExec, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgExec; + fromPartial(object: { + grantee?: string; + msgs?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _13.MsgExec; + }; + MsgGrantResponse: { + encode(_: _13.MsgGrantResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgGrantResponse; + fromPartial(_: {}): _13.MsgGrantResponse; + }; + MsgRevoke: { + encode(message: _13.MsgRevoke, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgRevoke; + fromPartial(object: { + granter?: string; + grantee?: string; + msgTypeUrl?: string; + }): _13.MsgRevoke; + 
}; + MsgRevokeResponse: { + encode(_: _13.MsgRevokeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _13.MsgRevokeResponse; + fromPartial(_: {}): _13.MsgRevokeResponse; + }; + QueryGrantsRequest: { + encode(message: _12.QueryGrantsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGrantsRequest; + fromPartial(object: { + granter?: string; + grantee?: string; + msgTypeUrl?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _12.QueryGrantsRequest; + }; + QueryGrantsResponse: { + encode(message: _12.QueryGrantsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGrantsResponse; + fromPartial(object: { + grants?: { + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _12.QueryGrantsResponse; + }; + QueryGranterGrantsRequest: { + encode(message: _12.QueryGranterGrantsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGranterGrantsRequest; + fromPartial(object: { + granter?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _12.QueryGranterGrantsRequest; + }; + QueryGranterGrantsResponse: { + encode(message: _12.QueryGranterGrantsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGranterGrantsResponse; + fromPartial(object: { + grants?: { + granter?: string; + grantee?: string; + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _12.QueryGranterGrantsResponse; + }; + QueryGranteeGrantsRequest: { + encode(message: _12.QueryGranteeGrantsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGranteeGrantsRequest; + fromPartial(object: { + grantee?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _12.QueryGranteeGrantsRequest; + }; + QueryGranteeGrantsResponse: { + encode(message: _12.QueryGranteeGrantsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _12.QueryGranteeGrantsResponse; + fromPartial(object: { + grants?: { + granter?: string; + grantee?: string; + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _12.QueryGranteeGrantsResponse; + }; + GenesisState: { + encode(message: 
_11.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _11.GenesisState; + fromPartial(object: { + authorization?: { + granter?: string; + grantee?: string; + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }[]; + }): _11.GenesisState; + }; + EventGrant: { + encode(message: _10.EventGrant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _10.EventGrant; + fromPartial(object: { + msgTypeUrl?: string; + granter?: string; + grantee?: string; + }): _10.EventGrant; + }; + EventRevoke: { + encode(message: _10.EventRevoke, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _10.EventRevoke; + fromPartial(object: { + msgTypeUrl?: string; + granter?: string; + grantee?: string; + }): _10.EventRevoke; + }; + GenericAuthorization: { + encode(message: _9.GenericAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _9.GenericAuthorization; + fromPartial(object: { + msg?: string; + }): _9.GenericAuthorization; + }; + Grant: { + encode(message: _9.Grant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _9.Grant; + fromPartial(object: { + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }): _9.Grant; + }; + GrantAuthorization: { + encode(message: _9.GrantAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _9.GrantAuthorization; + fromPartial(object: { + granter?: string; + grantee?: string; + authorization?: { + typeUrl?: string; + value?: Uint8Array; + }; + expiration?: Date; + }): _9.GrantAuthorization; + }; + GrantQueueItem: { + encode(message: _9.GrantQueueItem, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _9.GrantQueueItem; + fromPartial(object: { + msgTypeUrls?: string[]; + }): _9.GrantQueueItem; + }; + }; + } + namespace bank { + const v1beta1: { + MsgClientImpl: typeof _181.MsgClientImpl; + QueryClientImpl: typeof _165.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + balance(request: _17.QueryBalanceRequest): Promise<_17.QueryBalanceResponse>; + allBalances(request: _17.QueryAllBalancesRequest): Promise<_17.QueryAllBalancesResponse>; + spendableBalances(request: _17.QuerySpendableBalancesRequest): Promise<_17.QuerySpendableBalancesResponse>; + totalSupply(request?: _17.QueryTotalSupplyRequest): Promise<_17.QueryTotalSupplyResponse>; + supplyOf(request: _17.QuerySupplyOfRequest): Promise<_17.QuerySupplyOfResponse>; + params(request?: _17.QueryParamsRequest): Promise<_17.QueryParamsResponse>; + denomMetadata(request: _17.QueryDenomMetadataRequest): Promise<_17.QueryDenomMetadataResponse>; + denomsMetadata(request?: _17.QueryDenomsMetadataRequest): Promise<_17.QueryDenomsMetadataResponse>; + denomOwners(request: _17.QueryDenomOwnersRequest): Promise<_17.QueryDenomOwnersResponse>; + }; + LCDQueryClient: typeof _147.LCDQueryClient; + MsgSend: { + encode(message: _18.MsgSend, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _18.MsgSend; + fromPartial(object: { + fromAddress?: string; + toAddress?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _18.MsgSend; + }; + MsgSendResponse: { + encode(_: _18.MsgSendResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _18.MsgSendResponse; + fromPartial(_: {}): _18.MsgSendResponse; + }; + MsgMultiSend: { + encode(message: _18.MsgMultiSend, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _18.MsgMultiSend; + fromPartial(object: { + inputs?: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }[]; + outputs?: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }[]; + }): _18.MsgMultiSend; + }; + MsgMultiSendResponse: { + encode(_: _18.MsgMultiSendResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _18.MsgMultiSendResponse; + fromPartial(_: {}): _18.MsgMultiSendResponse; + }; + QueryBalanceRequest: { + encode(message: _17.QueryBalanceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryBalanceRequest; + fromPartial(object: { + address?: string; + denom?: string; + }): _17.QueryBalanceRequest; + }; + QueryBalanceResponse: { + encode(message: _17.QueryBalanceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryBalanceResponse; + fromPartial(object: { + balance?: { + denom?: string; + amount?: string; + }; + }): _17.QueryBalanceResponse; + }; + QueryAllBalancesRequest: { + encode(message: _17.QueryAllBalancesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryAllBalancesRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _17.QueryAllBalancesRequest; + }; + QueryAllBalancesResponse: { + encode(message: _17.QueryAllBalancesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryAllBalancesResponse; + fromPartial(object: { + balances?: { + denom?: string; + amount?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _17.QueryAllBalancesResponse; + }; + QuerySpendableBalancesRequest: { + encode(message: _17.QuerySpendableBalancesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QuerySpendableBalancesRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _17.QuerySpendableBalancesRequest; + }; + 
QuerySpendableBalancesResponse: { + encode(message: _17.QuerySpendableBalancesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QuerySpendableBalancesResponse; + fromPartial(object: { + balances?: { + denom?: string; + amount?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _17.QuerySpendableBalancesResponse; + }; + QueryTotalSupplyRequest: { + encode(message: _17.QueryTotalSupplyRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryTotalSupplyRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _17.QueryTotalSupplyRequest; + }; + QueryTotalSupplyResponse: { + encode(message: _17.QueryTotalSupplyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryTotalSupplyResponse; + fromPartial(object: { + supply?: { + denom?: string; + amount?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _17.QueryTotalSupplyResponse; + }; + QuerySupplyOfRequest: { + encode(message: _17.QuerySupplyOfRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QuerySupplyOfRequest; + fromPartial(object: { + denom?: string; + }): _17.QuerySupplyOfRequest; + }; + QuerySupplyOfResponse: { + encode(message: _17.QuerySupplyOfResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QuerySupplyOfResponse; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }; + }): _17.QuerySupplyOfResponse; + }; + QueryParamsRequest: { + encode(_: _17.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryParamsRequest; + fromPartial(_: {}): _17.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _17.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryParamsResponse; + fromPartial(object: { + params?: { + sendEnabled?: { + denom?: string; + enabled?: boolean; + }[]; + defaultSendEnabled?: boolean; + }; + }): _17.QueryParamsResponse; + }; + QueryDenomsMetadataRequest: { + encode(message: _17.QueryDenomsMetadataRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryDenomsMetadataRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _17.QueryDenomsMetadataRequest; + }; + QueryDenomsMetadataResponse: { + encode(message: _17.QueryDenomsMetadataResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, 
length?: number): _17.QueryDenomsMetadataResponse; + fromPartial(object: { + metadatas?: { + description?: string; + denomUnits?: { + denom?: string; + exponent?: number; + aliases?: string[]; + }[]; + base?: string; + display?: string; + name?: string; + symbol?: string; + uri?: string; + uriHash?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _17.QueryDenomsMetadataResponse; + }; + QueryDenomMetadataRequest: { + encode(message: _17.QueryDenomMetadataRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryDenomMetadataRequest; + fromPartial(object: { + denom?: string; + }): _17.QueryDenomMetadataRequest; + }; + QueryDenomMetadataResponse: { + encode(message: _17.QueryDenomMetadataResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryDenomMetadataResponse; + fromPartial(object: { + metadata?: { + description?: string; + denomUnits?: { + denom?: string; + exponent?: number; + aliases?: string[]; + }[]; + base?: string; + display?: string; + name?: string; + symbol?: string; + uri?: string; + uriHash?: string; + }; + }): _17.QueryDenomMetadataResponse; + }; + QueryDenomOwnersRequest: { + encode(message: _17.QueryDenomOwnersRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryDenomOwnersRequest; + fromPartial(object: { + denom?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _17.QueryDenomOwnersRequest; + }; + DenomOwner: { + encode(message: _17.DenomOwner, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.DenomOwner; + fromPartial(object: { + address?: string; + balance?: { + denom?: string; + amount?: string; + }; + }): _17.DenomOwner; + }; + QueryDenomOwnersResponse: { + encode(message: _17.QueryDenomOwnersResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _17.QueryDenomOwnersResponse; + fromPartial(object: { + denomOwners?: { + address?: string; + balance?: { + denom?: string; + amount?: string; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _17.QueryDenomOwnersResponse; + }; + GenesisState: { + encode(message: _16.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _16.GenesisState; + fromPartial(object: { + params?: { + sendEnabled?: { + denom?: string; + enabled?: boolean; + }[]; + defaultSendEnabled?: boolean; + }; + balances?: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }[]; + supply?: { + denom?: string; + amount?: string; + }[]; + denomMetadata?: { + description?: string; + denomUnits?: { + denom?: string; + exponent?: number; + aliases?: string[]; + }[]; + base?: string; + display?: string; + name?: string; + symbol?: string; + uri?: string; + uriHash?: string; + }[]; + }): _16.GenesisState; + }; + Balance: { + encode(message: _16.Balance, 
writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _16.Balance; + fromPartial(object: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }): _16.Balance; + }; + Params: { + encode(message: _15.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.Params; + fromPartial(object: { + sendEnabled?: { + denom?: string; + enabled?: boolean; + }[]; + defaultSendEnabled?: boolean; + }): _15.Params; + }; + SendEnabled: { + encode(message: _15.SendEnabled, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.SendEnabled; + fromPartial(object: { + denom?: string; + enabled?: boolean; + }): _15.SendEnabled; + }; + Input: { + encode(message: _15.Input, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.Input; + fromPartial(object: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }): _15.Input; + }; + Output: { + encode(message: _15.Output, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.Output; + fromPartial(object: { + address?: string; + coins?: { + denom?: string; + amount?: string; + }[]; + }): _15.Output; + }; + Supply: { + encode(message: _15.Supply, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.Supply; + fromPartial(object: { + total?: { + denom?: string; + amount?: string; + }[]; + }): _15.Supply; + }; + DenomUnit: { + encode(message: _15.DenomUnit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.DenomUnit; + fromPartial(object: { + denom?: string; + exponent?: number; + aliases?: string[]; + }): _15.DenomUnit; + }; + Metadata: { + encode(message: _15.Metadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _15.Metadata; + fromPartial(object: { + description?: string; + denomUnits?: { + denom?: string; + exponent?: number; + aliases?: string[]; + }[]; + base?: string; + display?: string; + name?: string; + symbol?: string; + uri?: string; + uriHash?: string; + }): _15.Metadata; + }; + SendAuthorization: { + encode(message: _14.SendAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _14.SendAuthorization; + fromPartial(object: { + spendLimit?: { + denom?: string; + amount?: string; + }[]; + }): _14.SendAuthorization; + }; + }; + } + namespace base { + namespace abci { + const v1beta1: { + TxResponse: { + encode(message: _19.TxResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.TxResponse; + fromPartial(object: { + height?: string | number | import("long").Long; + txhash?: string; + codespace?: string; + code?: number; + data?: string; + rawLog?: string; + logs?: { + msgIndex?: number; + log?: string; + events?: { + type?: string; + attributes?: { + key?: 
string; + value?: string; + }[]; + }[]; + }[]; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + tx?: { + typeUrl?: string; + value?: Uint8Array; + }; + timestamp?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }): _19.TxResponse; + }; + ABCIMessageLog: { + encode(message: _19.ABCIMessageLog, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.ABCIMessageLog; + fromPartial(object: { + msgIndex?: number; + log?: string; + events?: { + type?: string; + attributes?: { + key?: string; + value?: string; + }[]; + }[]; + }): _19.ABCIMessageLog; + }; + StringEvent: { + encode(message: _19.StringEvent, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.StringEvent; + fromPartial(object: { + type?: string; + attributes?: { + key?: string; + value?: string; + }[]; + }): _19.StringEvent; + }; + Attribute: { + encode(message: _19.Attribute, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.Attribute; + fromPartial(object: { + key?: string; + value?: string; + }): _19.Attribute; + }; + GasInfo: { + encode(message: _19.GasInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.GasInfo; + fromPartial(object: { + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + }): _19.GasInfo; + }; + Result: { + encode(message: _19.Result, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.Result; + fromPartial(object: { + data?: Uint8Array; + log?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + msgResponses?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _19.Result; + }; + SimulationResponse: { + encode(message: _19.SimulationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.SimulationResponse; + fromPartial(object: { + gasInfo?: { + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + }; + result?: { + data?: Uint8Array; + log?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + msgResponses?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + }): _19.SimulationResponse; + }; + MsgData: { + encode(message: _19.MsgData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.MsgData; + fromPartial(object: { + msgType?: string; + data?: Uint8Array; + }): _19.MsgData; + }; + TxMsgData: { + encode(message: _19.TxMsgData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.TxMsgData; + fromPartial(object: { + data?: { + msgType?: string; + data?: Uint8Array; + }[]; + msgResponses?: { + typeUrl?: string; + value?: 
Uint8Array; + }[]; + }): _19.TxMsgData; + }; + SearchTxsResult: { + encode(message: _19.SearchTxsResult, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _19.SearchTxsResult; + fromPartial(object: { + totalCount?: string | number | import("long").Long; + count?: string | number | import("long").Long; + pageNumber?: string | number | import("long").Long; + pageTotal?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + txs?: { + height?: string | number | import("long").Long; + txhash?: string; + codespace?: string; + code?: number; + data?: string; + rawLog?: string; + logs?: { + msgIndex?: number; + log?: string; + events?: { + type?: string; + attributes?: { + key?: string; + value?: string; + }[]; + }[]; + }[]; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + tx?: { + typeUrl?: string; + value?: Uint8Array; + }; + timestamp?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }[]; + }): _19.SearchTxsResult; + }; + }; + } + namespace kv { + const v1beta1: { + Pairs: { + encode(message: _20.Pairs, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _20.Pairs; + fromPartial(object: { + pairs?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + }): _20.Pairs; + }; + Pair: { + encode(message: _20.Pair, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _20.Pair; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + }): _20.Pair; + }; + }; + } + namespace query { + const v1beta1: { + PageRequest: { + encode(message: _21.PageRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _21.PageRequest; + fromPartial(object: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }): _21.PageRequest; + }; + PageResponse: { + encode(message: _21.PageResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _21.PageResponse; + fromPartial(object: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }): _21.PageResponse; + }; + }; + } + namespace reflection { + const v1beta1: { + ListAllInterfacesRequest: { + encode(_: _22.ListAllInterfacesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _22.ListAllInterfacesRequest; + fromPartial(_: {}): _22.ListAllInterfacesRequest; + }; + ListAllInterfacesResponse: { + encode(message: _22.ListAllInterfacesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _22.ListAllInterfacesResponse; + fromPartial(object: { + interfaceNames?: string[]; + }): _22.ListAllInterfacesResponse; + }; + ListImplementationsRequest: { + encode(message: _22.ListImplementationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | 
import("protobufjs").Reader, length?: number): _22.ListImplementationsRequest; + fromPartial(object: { + interfaceName?: string; + }): _22.ListImplementationsRequest; + }; + ListImplementationsResponse: { + encode(message: _22.ListImplementationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _22.ListImplementationsResponse; + fromPartial(object: { + implementationMessageNames?: string[]; + }): _22.ListImplementationsResponse; + }; + }; + const v2alpha1: { + AppDescriptor: { + encode(message: _23.AppDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.AppDescriptor; + fromPartial(object: { + authn?: { + signModes?: { + name?: string; + number?: number; + authnInfoProviderMethodFullname?: string; + }[]; + }; + chain?: { + id?: string; + }; + codec?: { + interfaces?: { + fullname?: string; + interfaceAcceptingMessages?: { + fullname?: string; + fieldDescriptorNames?: string[]; + }[]; + interfaceImplementers?: { + fullname?: string; + typeUrl?: string; + }[]; + }[]; + }; + configuration?: { + bech32AccountAddressPrefix?: string; + }; + queryServices?: { + queryServices?: { + fullname?: string; + isModule?: boolean; + methods?: { + name?: string; + fullQueryPath?: string; + }[]; + }[]; + }; + tx?: { + fullname?: string; + msgs?: { + msgTypeUrl?: string; + }[]; + }; + }): _23.AppDescriptor; + }; + TxDescriptor: { + encode(message: _23.TxDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.TxDescriptor; + fromPartial(object: { + fullname?: string; + msgs?: { + msgTypeUrl?: string; + }[]; + }): _23.TxDescriptor; + }; + AuthnDescriptor: { + encode(message: _23.AuthnDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.AuthnDescriptor; + fromPartial(object: { + signModes?: { + name?: string; + number?: number; + authnInfoProviderMethodFullname?: string; + }[]; + }): _23.AuthnDescriptor; + }; + SigningModeDescriptor: { + encode(message: _23.SigningModeDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.SigningModeDescriptor; + fromPartial(object: { + name?: string; + number?: number; + authnInfoProviderMethodFullname?: string; + }): _23.SigningModeDescriptor; + }; + ChainDescriptor: { + encode(message: _23.ChainDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.ChainDescriptor; + fromPartial(object: { + id?: string; + }): _23.ChainDescriptor; + }; + CodecDescriptor: { + encode(message: _23.CodecDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.CodecDescriptor; + fromPartial(object: { + interfaces?: { + fullname?: string; + interfaceAcceptingMessages?: { + fullname?: string; + fieldDescriptorNames?: string[]; + }[]; + interfaceImplementers?: { + fullname?: string; + typeUrl?: string; + }[]; + }[]; + }): _23.CodecDescriptor; + }; + InterfaceDescriptor: { + encode(message: _23.InterfaceDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; 
+ decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.InterfaceDescriptor; + fromPartial(object: { + fullname?: string; + interfaceAcceptingMessages?: { + fullname?: string; + fieldDescriptorNames?: string[]; + }[]; + interfaceImplementers?: { + fullname?: string; + typeUrl?: string; + }[]; + }): _23.InterfaceDescriptor; + }; + InterfaceImplementerDescriptor: { + encode(message: _23.InterfaceImplementerDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.InterfaceImplementerDescriptor; + fromPartial(object: { + fullname?: string; + typeUrl?: string; + }): _23.InterfaceImplementerDescriptor; + }; + InterfaceAcceptingMessageDescriptor: { + encode(message: _23.InterfaceAcceptingMessageDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.InterfaceAcceptingMessageDescriptor; + fromPartial(object: { + fullname?: string; + fieldDescriptorNames?: string[]; + }): _23.InterfaceAcceptingMessageDescriptor; + }; + ConfigurationDescriptor: { + encode(message: _23.ConfigurationDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.ConfigurationDescriptor; + fromPartial(object: { + bech32AccountAddressPrefix?: string; + }): _23.ConfigurationDescriptor; + }; + MsgDescriptor: { + encode(message: _23.MsgDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.MsgDescriptor; + fromPartial(object: { + msgTypeUrl?: string; + }): _23.MsgDescriptor; + }; + GetAuthnDescriptorRequest: { + encode(_: _23.GetAuthnDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetAuthnDescriptorRequest; + fromPartial(_: {}): _23.GetAuthnDescriptorRequest; + }; + GetAuthnDescriptorResponse: { + encode(message: _23.GetAuthnDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetAuthnDescriptorResponse; + fromPartial(object: { + authn?: { + signModes?: { + name?: string; + number?: number; + authnInfoProviderMethodFullname?: string; + }[]; + }; + }): _23.GetAuthnDescriptorResponse; + }; + GetChainDescriptorRequest: { + encode(_: _23.GetChainDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetChainDescriptorRequest; + fromPartial(_: {}): _23.GetChainDescriptorRequest; + }; + GetChainDescriptorResponse: { + encode(message: _23.GetChainDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetChainDescriptorResponse; + fromPartial(object: { + chain?: { + id?: string; + }; + }): _23.GetChainDescriptorResponse; + }; + GetCodecDescriptorRequest: { + encode(_: _23.GetCodecDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetCodecDescriptorRequest; + fromPartial(_: {}): _23.GetCodecDescriptorRequest; + }; + GetCodecDescriptorResponse: 
{ + encode(message: _23.GetCodecDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetCodecDescriptorResponse; + fromPartial(object: { + codec?: { + interfaces?: { + fullname?: string; + interfaceAcceptingMessages?: { + fullname?: string; + fieldDescriptorNames?: string[]; + }[]; + interfaceImplementers?: { + fullname?: string; + typeUrl?: string; + }[]; + }[]; + }; + }): _23.GetCodecDescriptorResponse; + }; + GetConfigurationDescriptorRequest: { + encode(_: _23.GetConfigurationDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetConfigurationDescriptorRequest; + fromPartial(_: {}): _23.GetConfigurationDescriptorRequest; + }; + GetConfigurationDescriptorResponse: { + encode(message: _23.GetConfigurationDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetConfigurationDescriptorResponse; + fromPartial(object: { + config?: { + bech32AccountAddressPrefix?: string; + }; + }): _23.GetConfigurationDescriptorResponse; + }; + GetQueryServicesDescriptorRequest: { + encode(_: _23.GetQueryServicesDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetQueryServicesDescriptorRequest; + fromPartial(_: {}): _23.GetQueryServicesDescriptorRequest; + }; + GetQueryServicesDescriptorResponse: { + encode(message: _23.GetQueryServicesDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetQueryServicesDescriptorResponse; + fromPartial(object: { + queries?: { + queryServices?: { + fullname?: string; + isModule?: boolean; + methods?: { + name?: string; + fullQueryPath?: string; + }[]; + }[]; + }; + }): _23.GetQueryServicesDescriptorResponse; + }; + GetTxDescriptorRequest: { + encode(_: _23.GetTxDescriptorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetTxDescriptorRequest; + fromPartial(_: {}): _23.GetTxDescriptorRequest; + }; + GetTxDescriptorResponse: { + encode(message: _23.GetTxDescriptorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.GetTxDescriptorResponse; + fromPartial(object: { + tx?: { + fullname?: string; + msgs?: { + msgTypeUrl?: string; + }[]; + }; + }): _23.GetTxDescriptorResponse; + }; + QueryServicesDescriptor: { + encode(message: _23.QueryServicesDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.QueryServicesDescriptor; + fromPartial(object: { + queryServices?: { + fullname?: string; + isModule?: boolean; + methods?: { + name?: string; + fullQueryPath?: string; + }[]; + }[]; + }): _23.QueryServicesDescriptor; + }; + QueryServiceDescriptor: { + encode(message: _23.QueryServiceDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.QueryServiceDescriptor; + fromPartial(object: { + fullname?: string; + 
isModule?: boolean; + methods?: { + name?: string; + fullQueryPath?: string; + }[]; + }): _23.QueryServiceDescriptor; + }; + QueryMethodDescriptor: { + encode(message: _23.QueryMethodDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _23.QueryMethodDescriptor; + fromPartial(object: { + name?: string; + fullQueryPath?: string; + }): _23.QueryMethodDescriptor; + }; + }; + } + namespace snapshots { + const v1beta1: { + Snapshot: { + encode(message: _24.Snapshot, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.Snapshot; + fromPartial(object: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: { + chunkHashes?: Uint8Array[]; + }; + }): _24.Snapshot; + }; + Metadata: { + encode(message: _24.Metadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.Metadata; + fromPartial(object: { + chunkHashes?: Uint8Array[]; + }): _24.Metadata; + }; + SnapshotItem: { + encode(message: _24.SnapshotItem, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotItem; + fromPartial(object: { + store?: { + name?: string; + }; + iavl?: { + key?: Uint8Array; + value?: Uint8Array; + version?: string | number | import("long").Long; + height?: number; + }; + extension?: { + name?: string; + format?: number; + }; + extensionPayload?: { + payload?: Uint8Array; + }; + kv?: { + key?: Uint8Array; + value?: Uint8Array; + }; + schema?: { + keys?: Uint8Array[]; + }; + }): _24.SnapshotItem; + }; + SnapshotStoreItem: { + encode(message: _24.SnapshotStoreItem, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotStoreItem; + fromPartial(object: { + name?: string; + }): _24.SnapshotStoreItem; + }; + SnapshotIAVLItem: { + encode(message: _24.SnapshotIAVLItem, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotIAVLItem; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + version?: string | number | import("long").Long; + height?: number; + }): _24.SnapshotIAVLItem; + }; + SnapshotExtensionMeta: { + encode(message: _24.SnapshotExtensionMeta, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotExtensionMeta; + fromPartial(object: { + name?: string; + format?: number; + }): _24.SnapshotExtensionMeta; + }; + SnapshotExtensionPayload: { + encode(message: _24.SnapshotExtensionPayload, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotExtensionPayload; + fromPartial(object: { + payload?: Uint8Array; + }): _24.SnapshotExtensionPayload; + }; + SnapshotKVItem: { + encode(message: _24.SnapshotKVItem, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotKVItem; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + }): _24.SnapshotKVItem; 
+ }; + SnapshotSchema: { + encode(message: _24.SnapshotSchema, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _24.SnapshotSchema; + fromPartial(object: { + keys?: Uint8Array[]; + }): _24.SnapshotSchema; + }; + }; + } + namespace store { + const v1beta1: { + StoreKVPair: { + encode(message: _26.StoreKVPair, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _26.StoreKVPair; + fromPartial(object: { + storeKey?: string; + delete?: boolean; + key?: Uint8Array; + value?: Uint8Array; + }): _26.StoreKVPair; + }; + CommitInfo: { + encode(message: _25.CommitInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _25.CommitInfo; + fromPartial(object: { + version?: string | number | import("long").Long; + storeInfos?: { + name?: string; + commitId?: { + version?: string | number | import("long").Long; + hash?: Uint8Array; + }; + }[]; + }): _25.CommitInfo; + }; + StoreInfo: { + encode(message: _25.StoreInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _25.StoreInfo; + fromPartial(object: { + name?: string; + commitId?: { + version?: string | number | import("long").Long; + hash?: Uint8Array; + }; + }): _25.StoreInfo; + }; + CommitID: { + encode(message: _25.CommitID, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _25.CommitID; + fromPartial(object: { + version?: string | number | import("long").Long; + hash?: Uint8Array; + }): _25.CommitID; + }; + }; + } + namespace tendermint { + const v1beta1: { + ServiceClientImpl: typeof _166.ServiceClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + getNodeInfo(request?: _27.GetNodeInfoRequest): Promise<_27.GetNodeInfoResponse>; + getSyncing(request?: _27.GetSyncingRequest): Promise<_27.GetSyncingResponse>; + getLatestBlock(request?: _27.GetLatestBlockRequest): Promise<_27.GetLatestBlockResponse>; + getBlockByHeight(request: _27.GetBlockByHeightRequest): Promise<_27.GetBlockByHeightResponse>; + getLatestValidatorSet(request?: _27.GetLatestValidatorSetRequest): Promise<_27.GetLatestValidatorSetResponse>; + getValidatorSetByHeight(request: _27.GetValidatorSetByHeightRequest): Promise<_27.GetValidatorSetByHeightResponse>; + }; + LCDQueryClient: typeof _148.LCDQueryClient; + GetValidatorSetByHeightRequest: { + encode(message: _27.GetValidatorSetByHeightRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetValidatorSetByHeightRequest; + fromPartial(object: { + height?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _27.GetValidatorSetByHeightRequest; + }; + GetValidatorSetByHeightResponse: { + encode(message: _27.GetValidatorSetByHeightResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetValidatorSetByHeightResponse; + fromPartial(object: { + blockHeight?: string | 
number | import("long").Long; + validators?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _27.GetValidatorSetByHeightResponse; + }; + GetLatestValidatorSetRequest: { + encode(message: _27.GetLatestValidatorSetRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetLatestValidatorSetRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _27.GetLatestValidatorSetRequest; + }; + GetLatestValidatorSetResponse: { + encode(message: _27.GetLatestValidatorSetResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetLatestValidatorSetResponse; + fromPartial(object: { + blockHeight?: string | number | import("long").Long; + validators?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _27.GetLatestValidatorSetResponse; + }; + Validator: { + encode(message: _27.Validator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.Validator; + fromPartial(object: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }): _27.Validator; + }; + GetBlockByHeightRequest: { + encode(message: _27.GetBlockByHeightRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetBlockByHeightRequest; + fromPartial(object: { + height?: string | number | import("long").Long; + }): _27.GetBlockByHeightRequest; + }; + GetBlockByHeightResponse: { + encode(message: _27.GetBlockByHeightResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetBlockByHeightResponse; + fromPartial(object: { + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + block?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + data?: { + txs?: Uint8Array[]; + }; + evidence?: { + evidence?: { + duplicateVoteEvidence?: { + voteA?: { + type?: 
import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }[]; + }; + lastCommit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + }): _27.GetBlockByHeightResponse; + }; + GetLatestBlockRequest: { + encode(_: _27.GetLatestBlockRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetLatestBlockRequest; + fromPartial(_: {}): _27.GetLatestBlockRequest; + }; + GetLatestBlockResponse: { + encode(message: 
_27.GetLatestBlockResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetLatestBlockResponse; + fromPartial(object: { + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + block?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + data?: { + txs?: Uint8Array[]; + }; + evidence?: { + evidence?: { + duplicateVoteEvidence?: { + voteA?: { + type?: import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + 
address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }[]; + }; + lastCommit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + }): _27.GetLatestBlockResponse; + }; + GetSyncingRequest: { + encode(_: _27.GetSyncingRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetSyncingRequest; + fromPartial(_: {}): _27.GetSyncingRequest; + }; + GetSyncingResponse: { + encode(message: _27.GetSyncingResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetSyncingResponse; + fromPartial(object: { + syncing?: boolean; + }): _27.GetSyncingResponse; + }; + GetNodeInfoRequest: { + encode(_: _27.GetNodeInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetNodeInfoRequest; + fromPartial(_: {}): _27.GetNodeInfoRequest; + }; + GetNodeInfoResponse: { + encode(message: _27.GetNodeInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.GetNodeInfoResponse; + fromPartial(object: { + nodeInfo?: { + protocolVersion?: { + p2p?: string | number | import("long").Long; + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + nodeId?: string; + listenAddr?: string; + network?: string; + version?: string; + channels?: Uint8Array; + moniker?: string; + other?: { + txIndex?: string; + rpcAddress?: string; + }; + }; + applicationVersion?: { + name?: string; + appName?: string; + version?: string; + gitCommit?: string; + buildTags?: string; + goVersion?: string; + buildDeps?: { + path?: string; + version?: string; + sum?: string; + }[]; + cosmosSdkVersion?: string; + }; + }): _27.GetNodeInfoResponse; + }; + VersionInfo: { + encode(message: _27.VersionInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.VersionInfo; + fromPartial(object: { + name?: string; + appName?: string; + version?: string; + gitCommit?: string; + buildTags?: string; + goVersion?: string; + buildDeps?: { + path?: string; + version?: string; + sum?: string; + }[]; + cosmosSdkVersion?: string; + }): _27.VersionInfo; + }; + Module: { + encode(message: _27.Module, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _27.Module; + fromPartial(object: { + path?: string; + version?: string; + sum?: string; + }): _27.Module; + }; + }; + } + const v1beta1: { + Coin: { + encode(message: _28.Coin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _28.Coin; + fromPartial(object: { + 
denom?: string; + amount?: string; + }): _28.Coin; + }; + DecCoin: { + encode(message: _28.DecCoin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _28.DecCoin; + fromPartial(object: { + denom?: string; + amount?: string; + }): _28.DecCoin; + }; + IntProto: { + encode(message: _28.IntProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _28.IntProto; + fromPartial(object: { + int?: string; + }): _28.IntProto; + }; + DecProto: { + encode(message: _28.DecProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _28.DecProto; + fromPartial(object: { + dec?: string; + }): _28.DecProto; + }; + }; + } + namespace capability { + const v1beta1: { + GenesisOwners: { + encode(message: _30.GenesisOwners, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _30.GenesisOwners; + fromPartial(object: { + index?: string | number | import("long").Long; + indexOwners?: { + owners?: { + module?: string; + name?: string; + }[]; + }; + }): _30.GenesisOwners; + }; + GenesisState: { + encode(message: _30.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _30.GenesisState; + fromPartial(object: { + index?: string | number | import("long").Long; + owners?: { + index?: string | number | import("long").Long; + indexOwners?: { + owners?: { + module?: string; + name?: string; + }[]; + }; + }[]; + }): _30.GenesisState; + }; + Capability: { + encode(message: _29.Capability, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _29.Capability; + fromPartial(object: { + index?: string | number | import("long").Long; + }): _29.Capability; + }; + Owner: { + encode(message: _29.Owner, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _29.Owner; + fromPartial(object: { + module?: string; + name?: string; + }): _29.Owner; + }; + CapabilityOwners: { + encode(message: _29.CapabilityOwners, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _29.CapabilityOwners; + fromPartial(object: { + owners?: { + module?: string; + name?: string; + }[]; + }): _29.CapabilityOwners; + }; + }; + } + namespace crisis { + const v1beta1: { + MsgClientImpl: typeof _182.MsgClientImpl; + MsgVerifyInvariant: { + encode(message: _32.MsgVerifyInvariant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _32.MsgVerifyInvariant; + fromPartial(object: { + sender?: string; + invariantModuleName?: string; + invariantRoute?: string; + }): _32.MsgVerifyInvariant; + }; + MsgVerifyInvariantResponse: { + encode(_: _32.MsgVerifyInvariantResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _32.MsgVerifyInvariantResponse; + fromPartial(_: {}): _32.MsgVerifyInvariantResponse; + }; + GenesisState: { + encode(message: 
_31.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _31.GenesisState; + fromPartial(object: { + constantFee?: { + denom?: string; + amount?: string; + }; + }): _31.GenesisState; + }; + }; + } + namespace crypto { + const ed25519: { + PubKey: { + encode(message: _33.PubKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _33.PubKey; + fromPartial(object: { + key?: Uint8Array; + }): _33.PubKey; + }; + PrivKey: { + encode(message: _33.PrivKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _33.PrivKey; + fromPartial(object: { + key?: Uint8Array; + }): _33.PrivKey; + }; + }; + namespace hd { + const v1: { + BIP44Params: { + encode(message: _34.BIP44Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _34.BIP44Params; + fromPartial(object: { + purpose?: number; + coinType?: number; + account?: number; + change?: boolean; + addressIndex?: number; + }): _34.BIP44Params; + }; + }; + } + namespace keyring { + const v1: { + Record: { + encode(message: _35.Record, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _35.Record; + fromPartial(object: { + name?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + local?: { + privKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + privKeyType?: string; + }; + ledger?: { + path?: { + purpose?: number; + coinType?: number; + account?: number; + change?: boolean; + addressIndex?: number; + }; + }; + multi?: {}; + offline?: {}; + }): _35.Record; + }; + Record_Local: { + encode(message: _35.Record_Local, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _35.Record_Local; + fromPartial(object: { + privKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + privKeyType?: string; + }): _35.Record_Local; + }; + Record_Ledger: { + encode(message: _35.Record_Ledger, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _35.Record_Ledger; + fromPartial(object: { + path?: { + purpose?: number; + coinType?: number; + account?: number; + change?: boolean; + addressIndex?: number; + }; + }): _35.Record_Ledger; + }; + Record_Multi: { + encode(_: _35.Record_Multi, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _35.Record_Multi; + fromPartial(_: {}): _35.Record_Multi; + }; + Record_Offline: { + encode(_: _35.Record_Offline, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _35.Record_Offline; + fromPartial(_: {}): _35.Record_Offline; + }; + }; + } + const multisig: { + LegacyAminoPubKey: { + encode(message: _36.LegacyAminoPubKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _36.LegacyAminoPubKey; + fromPartial(object: { + threshold?: number; + publicKeys?: { + typeUrl?: string; + value?: Uint8Array; 
+ }[]; + }): _36.LegacyAminoPubKey; + }; + }; + const secp256k1: { + PubKey: { + encode(message: _37.PubKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _37.PubKey; + fromPartial(object: { + key?: Uint8Array; + }): _37.PubKey; + }; + PrivKey: { + encode(message: _37.PrivKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _37.PrivKey; + fromPartial(object: { + key?: Uint8Array; + }): _37.PrivKey; + }; + }; + const secp256r1: { + PubKey: { + encode(message: _38.PubKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _38.PubKey; + fromPartial(object: { + key?: Uint8Array; + }): _38.PubKey; + }; + PrivKey: { + encode(message: _38.PrivKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _38.PrivKey; + fromPartial(object: { + secret?: Uint8Array; + }): _38.PrivKey; + }; + }; + } + namespace distribution { + const v1beta1: { + MsgClientImpl: typeof _183.MsgClientImpl; + QueryClientImpl: typeof _167.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + params(request?: _41.QueryParamsRequest): Promise<_41.QueryParamsResponse>; + validatorOutstandingRewards(request: _41.QueryValidatorOutstandingRewardsRequest): Promise<_41.QueryValidatorOutstandingRewardsResponse>; + validatorCommission(request: _41.QueryValidatorCommissionRequest): Promise<_41.QueryValidatorCommissionResponse>; + validatorSlashes(request: _41.QueryValidatorSlashesRequest): Promise<_41.QueryValidatorSlashesResponse>; + delegationRewards(request: _41.QueryDelegationRewardsRequest): Promise<_41.QueryDelegationRewardsResponse>; + delegationTotalRewards(request: _41.QueryDelegationTotalRewardsRequest): Promise<_41.QueryDelegationTotalRewardsResponse>; + delegatorValidators(request: _41.QueryDelegatorValidatorsRequest): Promise<_41.QueryDelegatorValidatorsResponse>; + delegatorWithdrawAddress(request: _41.QueryDelegatorWithdrawAddressRequest): Promise<_41.QueryDelegatorWithdrawAddressResponse>; + communityPool(request?: _41.QueryCommunityPoolRequest): Promise<_41.QueryCommunityPoolResponse>; + }; + LCDQueryClient: typeof _149.LCDQueryClient; + MsgSetWithdrawAddress: { + encode(message: _42.MsgSetWithdrawAddress, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgSetWithdrawAddress; + fromPartial(object: { + delegatorAddress?: string; + withdrawAddress?: string; + }): _42.MsgSetWithdrawAddress; + }; + MsgSetWithdrawAddressResponse: { + encode(_: _42.MsgSetWithdrawAddressResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgSetWithdrawAddressResponse; + fromPartial(_: {}): _42.MsgSetWithdrawAddressResponse; + }; + MsgWithdrawDelegatorReward: { + encode(message: _42.MsgWithdrawDelegatorReward, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgWithdrawDelegatorReward; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + }): _42.MsgWithdrawDelegatorReward; + }; + 
MsgWithdrawDelegatorRewardResponse: { + encode(message: _42.MsgWithdrawDelegatorRewardResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgWithdrawDelegatorRewardResponse; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }[]; + }): _42.MsgWithdrawDelegatorRewardResponse; + }; + MsgWithdrawValidatorCommission: { + encode(message: _42.MsgWithdrawValidatorCommission, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgWithdrawValidatorCommission; + fromPartial(object: { + validatorAddress?: string; + }): _42.MsgWithdrawValidatorCommission; + }; + MsgWithdrawValidatorCommissionResponse: { + encode(message: _42.MsgWithdrawValidatorCommissionResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgWithdrawValidatorCommissionResponse; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }[]; + }): _42.MsgWithdrawValidatorCommissionResponse; + }; + MsgFundCommunityPool: { + encode(message: _42.MsgFundCommunityPool, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgFundCommunityPool; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }[]; + depositor?: string; + }): _42.MsgFundCommunityPool; + }; + MsgFundCommunityPoolResponse: { + encode(_: _42.MsgFundCommunityPoolResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _42.MsgFundCommunityPoolResponse; + fromPartial(_: {}): _42.MsgFundCommunityPoolResponse; + }; + QueryParamsRequest: { + encode(_: _41.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryParamsRequest; + fromPartial(_: {}): _41.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _41.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryParamsResponse; + fromPartial(object: { + params?: { + communityTax?: string; + baseProposerReward?: string; + bonusProposerReward?: string; + withdrawAddrEnabled?: boolean; + }; + }): _41.QueryParamsResponse; + }; + QueryValidatorOutstandingRewardsRequest: { + encode(message: _41.QueryValidatorOutstandingRewardsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorOutstandingRewardsRequest; + fromPartial(object: { + validatorAddress?: string; + }): _41.QueryValidatorOutstandingRewardsRequest; + }; + QueryValidatorOutstandingRewardsResponse: { + encode(message: _41.QueryValidatorOutstandingRewardsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorOutstandingRewardsResponse; + fromPartial(object: { + rewards?: { + rewards?: { + denom?: string; + amount?: string; + }[]; + }; + }): _41.QueryValidatorOutstandingRewardsResponse; + }; + QueryValidatorCommissionRequest: { + encode(message: 
_41.QueryValidatorCommissionRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorCommissionRequest; + fromPartial(object: { + validatorAddress?: string; + }): _41.QueryValidatorCommissionRequest; + }; + QueryValidatorCommissionResponse: { + encode(message: _41.QueryValidatorCommissionResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorCommissionResponse; + fromPartial(object: { + commission?: { + commission?: { + denom?: string; + amount?: string; + }[]; + }; + }): _41.QueryValidatorCommissionResponse; + }; + QueryValidatorSlashesRequest: { + encode(message: _41.QueryValidatorSlashesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorSlashesRequest; + fromPartial(object: { + validatorAddress?: string; + startingHeight?: string | number | import("long").Long; + endingHeight?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _41.QueryValidatorSlashesRequest; + }; + QueryValidatorSlashesResponse: { + encode(message: _41.QueryValidatorSlashesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryValidatorSlashesResponse; + fromPartial(object: { + slashes?: { + validatorPeriod?: string | number | import("long").Long; + fraction?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _41.QueryValidatorSlashesResponse; + }; + QueryDelegationRewardsRequest: { + encode(message: _41.QueryDelegationRewardsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegationRewardsRequest; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + }): _41.QueryDelegationRewardsRequest; + }; + QueryDelegationRewardsResponse: { + encode(message: _41.QueryDelegationRewardsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegationRewardsResponse; + fromPartial(object: { + rewards?: { + denom?: string; + amount?: string; + }[]; + }): _41.QueryDelegationRewardsResponse; + }; + QueryDelegationTotalRewardsRequest: { + encode(message: _41.QueryDelegationTotalRewardsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegationTotalRewardsRequest; + fromPartial(object: { + delegatorAddress?: string; + }): _41.QueryDelegationTotalRewardsRequest; + }; + QueryDelegationTotalRewardsResponse: { + encode(message: _41.QueryDelegationTotalRewardsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegationTotalRewardsResponse; + fromPartial(object: { + rewards?: { + validatorAddress?: string; + reward?: { + denom?: string; + amount?: string; + }[]; + }[]; + total?: { 
+ denom?: string; + amount?: string; + }[]; + }): _41.QueryDelegationTotalRewardsResponse; + }; + QueryDelegatorValidatorsRequest: { + encode(message: _41.QueryDelegatorValidatorsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegatorValidatorsRequest; + fromPartial(object: { + delegatorAddress?: string; + }): _41.QueryDelegatorValidatorsRequest; + }; + QueryDelegatorValidatorsResponse: { + encode(message: _41.QueryDelegatorValidatorsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegatorValidatorsResponse; + fromPartial(object: { + validators?: string[]; + }): _41.QueryDelegatorValidatorsResponse; + }; + QueryDelegatorWithdrawAddressRequest: { + encode(message: _41.QueryDelegatorWithdrawAddressRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegatorWithdrawAddressRequest; + fromPartial(object: { + delegatorAddress?: string; + }): _41.QueryDelegatorWithdrawAddressRequest; + }; + QueryDelegatorWithdrawAddressResponse: { + encode(message: _41.QueryDelegatorWithdrawAddressResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryDelegatorWithdrawAddressResponse; + fromPartial(object: { + withdrawAddress?: string; + }): _41.QueryDelegatorWithdrawAddressResponse; + }; + QueryCommunityPoolRequest: { + encode(_: _41.QueryCommunityPoolRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryCommunityPoolRequest; + fromPartial(_: {}): _41.QueryCommunityPoolRequest; + }; + QueryCommunityPoolResponse: { + encode(message: _41.QueryCommunityPoolResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _41.QueryCommunityPoolResponse; + fromPartial(object: { + pool?: { + denom?: string; + amount?: string; + }[]; + }): _41.QueryCommunityPoolResponse; + }; + DelegatorWithdrawInfo: { + encode(message: _40.DelegatorWithdrawInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.DelegatorWithdrawInfo; + fromPartial(object: { + delegatorAddress?: string; + withdrawAddress?: string; + }): _40.DelegatorWithdrawInfo; + }; + ValidatorOutstandingRewardsRecord: { + encode(message: _40.ValidatorOutstandingRewardsRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.ValidatorOutstandingRewardsRecord; + fromPartial(object: { + validatorAddress?: string; + outstandingRewards?: { + denom?: string; + amount?: string; + }[]; + }): _40.ValidatorOutstandingRewardsRecord; + }; + ValidatorAccumulatedCommissionRecord: { + encode(message: _40.ValidatorAccumulatedCommissionRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.ValidatorAccumulatedCommissionRecord; + fromPartial(object: { + validatorAddress?: string; + accumulated?: { + commission?: { + denom?: string; + amount?: string; + }[]; 
+ }; + }): _40.ValidatorAccumulatedCommissionRecord; + }; + ValidatorHistoricalRewardsRecord: { + encode(message: _40.ValidatorHistoricalRewardsRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.ValidatorHistoricalRewardsRecord; + fromPartial(object: { + validatorAddress?: string; + period?: string | number | import("long").Long; + rewards?: { + cumulativeRewardRatio?: { + denom?: string; + amount?: string; + }[]; + referenceCount?: number; + }; + }): _40.ValidatorHistoricalRewardsRecord; + }; + ValidatorCurrentRewardsRecord: { + encode(message: _40.ValidatorCurrentRewardsRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.ValidatorCurrentRewardsRecord; + fromPartial(object: { + validatorAddress?: string; + rewards?: { + rewards?: { + denom?: string; + amount?: string; + }[]; + period?: string | number | import("long").Long; + }; + }): _40.ValidatorCurrentRewardsRecord; + }; + DelegatorStartingInfoRecord: { + encode(message: _40.DelegatorStartingInfoRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.DelegatorStartingInfoRecord; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + startingInfo?: { + previousPeriod?: string | number | import("long").Long; + stake?: string; + height?: string | number | import("long").Long; + }; + }): _40.DelegatorStartingInfoRecord; + }; + ValidatorSlashEventRecord: { + encode(message: _40.ValidatorSlashEventRecord, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.ValidatorSlashEventRecord; + fromPartial(object: { + validatorAddress?: string; + height?: string | number | import("long").Long; + period?: string | number | import("long").Long; + validatorSlashEvent?: { + validatorPeriod?: string | number | import("long").Long; + fraction?: string; + }; + }): _40.ValidatorSlashEventRecord; + }; + GenesisState: { + encode(message: _40.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _40.GenesisState; + fromPartial(object: { + params?: { + communityTax?: string; + baseProposerReward?: string; + bonusProposerReward?: string; + withdrawAddrEnabled?: boolean; + }; + feePool?: { + communityPool?: { + denom?: string; + amount?: string; + }[]; + }; + delegatorWithdrawInfos?: { + delegatorAddress?: string; + withdrawAddress?: string; + }[]; + previousProposer?: string; + outstandingRewards?: { + validatorAddress?: string; + outstandingRewards?: { + denom?: string; + amount?: string; + }[]; + }[]; + validatorAccumulatedCommissions?: { + validatorAddress?: string; + accumulated?: { + commission?: { + denom?: string; + amount?: string; + }[]; + }; + }[]; + validatorHistoricalRewards?: { + validatorAddress?: string; + period?: string | number | import("long").Long; + rewards?: { + cumulativeRewardRatio?: { + denom?: string; + amount?: string; + }[]; + referenceCount?: number; + }; + }[]; + validatorCurrentRewards?: { + validatorAddress?: string; + rewards?: { + rewards?: { + denom?: string; + amount?: string; + }[]; + period?: string | number | import("long").Long; + }; + }[]; + delegatorStartingInfos?: { + delegatorAddress?: string; 
+ validatorAddress?: string; + startingInfo?: { + previousPeriod?: string | number | import("long").Long; + stake?: string; + height?: string | number | import("long").Long; + }; + }[]; + validatorSlashEvents?: { + validatorAddress?: string; + height?: string | number | import("long").Long; + period?: string | number | import("long").Long; + validatorSlashEvent?: { + validatorPeriod?: string | number | import("long").Long; + fraction?: string; + }; + }[]; + }): _40.GenesisState; + }; + Params: { + encode(message: _39.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.Params; + fromPartial(object: { + communityTax?: string; + baseProposerReward?: string; + bonusProposerReward?: string; + withdrawAddrEnabled?: boolean; + }): _39.Params; + }; + ValidatorHistoricalRewards: { + encode(message: _39.ValidatorHistoricalRewards, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorHistoricalRewards; + fromPartial(object: { + cumulativeRewardRatio?: { + denom?: string; + amount?: string; + }[]; + referenceCount?: number; + }): _39.ValidatorHistoricalRewards; + }; + ValidatorCurrentRewards: { + encode(message: _39.ValidatorCurrentRewards, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorCurrentRewards; + fromPartial(object: { + rewards?: { + denom?: string; + amount?: string; + }[]; + period?: string | number | import("long").Long; + }): _39.ValidatorCurrentRewards; + }; + ValidatorAccumulatedCommission: { + encode(message: _39.ValidatorAccumulatedCommission, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorAccumulatedCommission; + fromPartial(object: { + commission?: { + denom?: string; + amount?: string; + }[]; + }): _39.ValidatorAccumulatedCommission; + }; + ValidatorOutstandingRewards: { + encode(message: _39.ValidatorOutstandingRewards, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorOutstandingRewards; + fromPartial(object: { + rewards?: { + denom?: string; + amount?: string; + }[]; + }): _39.ValidatorOutstandingRewards; + }; + ValidatorSlashEvent: { + encode(message: _39.ValidatorSlashEvent, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorSlashEvent; + fromPartial(object: { + validatorPeriod?: string | number | import("long").Long; + fraction?: string; + }): _39.ValidatorSlashEvent; + }; + ValidatorSlashEvents: { + encode(message: _39.ValidatorSlashEvents, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.ValidatorSlashEvents; + fromPartial(object: { + validatorSlashEvents?: { + validatorPeriod?: string | number | import("long").Long; + fraction?: string; + }[]; + }): _39.ValidatorSlashEvents; + }; + FeePool: { + encode(message: _39.FeePool, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.FeePool; + fromPartial(object: { + communityPool?: { + denom?: string; + amount?: 
string; + }[]; + }): _39.FeePool; + }; + CommunityPoolSpendProposal: { + encode(message: _39.CommunityPoolSpendProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.CommunityPoolSpendProposal; + fromPartial(object: { + title?: string; + description?: string; + recipient?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _39.CommunityPoolSpendProposal; + }; + DelegatorStartingInfo: { + encode(message: _39.DelegatorStartingInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.DelegatorStartingInfo; + fromPartial(object: { + previousPeriod?: string | number | import("long").Long; + stake?: string; + height?: string | number | import("long").Long; + }): _39.DelegatorStartingInfo; + }; + DelegationDelegatorReward: { + encode(message: _39.DelegationDelegatorReward, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.DelegationDelegatorReward; + fromPartial(object: { + validatorAddress?: string; + reward?: { + denom?: string; + amount?: string; + }[]; + }): _39.DelegationDelegatorReward; + }; + CommunityPoolSpendProposalWithDeposit: { + encode(message: _39.CommunityPoolSpendProposalWithDeposit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _39.CommunityPoolSpendProposalWithDeposit; + fromPartial(object: { + title?: string; + description?: string; + recipient?: string; + amount?: string; + deposit?: string; + }): _39.CommunityPoolSpendProposalWithDeposit; + }; + }; + } + namespace evidence { + const v1beta1: { + MsgClientImpl: typeof _184.MsgClientImpl; + QueryClientImpl: typeof _168.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + evidence(request: _45.QueryEvidenceRequest): Promise<_45.QueryEvidenceResponse>; + allEvidence(request?: _45.QueryAllEvidenceRequest): Promise<_45.QueryAllEvidenceResponse>; + }; + LCDQueryClient: typeof _150.LCDQueryClient; + MsgSubmitEvidence: { + encode(message: _46.MsgSubmitEvidence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _46.MsgSubmitEvidence; + fromPartial(object: { + submitter?: string; + evidence?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _46.MsgSubmitEvidence; + }; + MsgSubmitEvidenceResponse: { + encode(message: _46.MsgSubmitEvidenceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _46.MsgSubmitEvidenceResponse; + fromPartial(object: { + hash?: Uint8Array; + }): _46.MsgSubmitEvidenceResponse; + }; + QueryEvidenceRequest: { + encode(message: _45.QueryEvidenceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _45.QueryEvidenceRequest; + fromPartial(object: { + evidenceHash?: Uint8Array; + }): _45.QueryEvidenceRequest; + }; + QueryEvidenceResponse: { + encode(message: _45.QueryEvidenceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _45.QueryEvidenceResponse; + fromPartial(object: 
{ + evidence?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _45.QueryEvidenceResponse; + }; + QueryAllEvidenceRequest: { + encode(message: _45.QueryAllEvidenceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _45.QueryAllEvidenceRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _45.QueryAllEvidenceRequest; + }; + QueryAllEvidenceResponse: { + encode(message: _45.QueryAllEvidenceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _45.QueryAllEvidenceResponse; + fromPartial(object: { + evidence?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _45.QueryAllEvidenceResponse; + }; + GenesisState: { + encode(message: _44.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _44.GenesisState; + fromPartial(object: { + evidence?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _44.GenesisState; + }; + Equivocation: { + encode(message: _43.Equivocation, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _43.Equivocation; + fromPartial(object: { + height?: string | number | import("long").Long; + time?: Date; + power?: string | number | import("long").Long; + consensusAddress?: string; + }): _43.Equivocation; + }; + }; + } + namespace feegrant { + const v1beta1: { + MsgClientImpl: typeof _185.MsgClientImpl; + QueryClientImpl: typeof _169.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + allowance(request: _49.QueryAllowanceRequest): Promise<_49.QueryAllowanceResponse>; + allowances(request: _49.QueryAllowancesRequest): Promise<_49.QueryAllowancesResponse>; + allowancesByGranter(request: _49.QueryAllowancesByGranterRequest): Promise<_49.QueryAllowancesByGranterResponse>; + }; + LCDQueryClient: typeof _151.LCDQueryClient; + MsgGrantAllowance: { + encode(message: _50.MsgGrantAllowance, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _50.MsgGrantAllowance; + fromPartial(object: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _50.MsgGrantAllowance; + }; + MsgGrantAllowanceResponse: { + encode(_: _50.MsgGrantAllowanceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _50.MsgGrantAllowanceResponse; + fromPartial(_: {}): _50.MsgGrantAllowanceResponse; + }; + MsgRevokeAllowance: { + encode(message: _50.MsgRevokeAllowance, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _50.MsgRevokeAllowance; + fromPartial(object: { + granter?: string; + grantee?: string; + }): _50.MsgRevokeAllowance; + }; + MsgRevokeAllowanceResponse: { + encode(_: _50.MsgRevokeAllowanceResponse, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _50.MsgRevokeAllowanceResponse; + fromPartial(_: {}): _50.MsgRevokeAllowanceResponse; + }; + QueryAllowanceRequest: { + encode(message: _49.QueryAllowanceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowanceRequest; + fromPartial(object: { + granter?: string; + grantee?: string; + }): _49.QueryAllowanceRequest; + }; + QueryAllowanceResponse: { + encode(message: _49.QueryAllowanceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowanceResponse; + fromPartial(object: { + allowance?: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _49.QueryAllowanceResponse; + }; + QueryAllowancesRequest: { + encode(message: _49.QueryAllowancesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowancesRequest; + fromPartial(object: { + grantee?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _49.QueryAllowancesRequest; + }; + QueryAllowancesResponse: { + encode(message: _49.QueryAllowancesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowancesResponse; + fromPartial(object: { + allowances?: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _49.QueryAllowancesResponse; + }; + QueryAllowancesByGranterRequest: { + encode(message: _49.QueryAllowancesByGranterRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowancesByGranterRequest; + fromPartial(object: { + granter?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _49.QueryAllowancesByGranterRequest; + }; + QueryAllowancesByGranterResponse: { + encode(message: _49.QueryAllowancesByGranterResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _49.QueryAllowancesByGranterResponse; + fromPartial(object: { + allowances?: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _49.QueryAllowancesByGranterResponse; + }; + GenesisState: { + encode(message: _48.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _48.GenesisState; + fromPartial(object: { + allowances?: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _48.GenesisState; + }; + 
BasicAllowance: { + encode(message: _47.BasicAllowance, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _47.BasicAllowance; + fromPartial(object: { + spendLimit?: { + denom?: string; + amount?: string; + }[]; + expiration?: Date; + }): _47.BasicAllowance; + }; + PeriodicAllowance: { + encode(message: _47.PeriodicAllowance, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _47.PeriodicAllowance; + fromPartial(object: { + basic?: { + spendLimit?: { + denom?: string; + amount?: string; + }[]; + expiration?: Date; + }; + period?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + periodSpendLimit?: { + denom?: string; + amount?: string; + }[]; + periodCanSpend?: { + denom?: string; + amount?: string; + }[]; + periodReset?: Date; + }): _47.PeriodicAllowance; + }; + AllowedMsgAllowance: { + encode(message: _47.AllowedMsgAllowance, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _47.AllowedMsgAllowance; + fromPartial(object: { + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + allowedMessages?: string[]; + }): _47.AllowedMsgAllowance; + }; + Grant: { + encode(message: _47.Grant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _47.Grant; + fromPartial(object: { + granter?: string; + grantee?: string; + allowance?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _47.Grant; + }; + }; + } + namespace genutil { + const v1beta1: { + GenesisState: { + encode(message: _51.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _51.GenesisState; + fromPartial(object: { + genTxs?: Uint8Array[]; + }): _51.GenesisState; + }; + }; + } + namespace gov { + const v1: { + MsgClientImpl: typeof _186.MsgClientImpl; + QueryClientImpl: typeof _170.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + proposal(request: _54.QueryProposalRequest): Promise<_54.QueryProposalResponse>; + proposals(request: _54.QueryProposalsRequest): Promise<_54.QueryProposalsResponse>; + vote(request: _54.QueryVoteRequest): Promise<_54.QueryVoteResponse>; + votes(request: _54.QueryVotesRequest): Promise<_54.QueryVotesResponse>; + params(request: _54.QueryParamsRequest): Promise<_54.QueryParamsResponse>; + deposit(request: _54.QueryDepositRequest): Promise<_54.QueryDepositResponse>; + deposits(request: _54.QueryDepositsRequest): Promise<_54.QueryDepositsResponse>; + tallyResult(request: _54.QueryTallyResultRequest): Promise<_54.QueryTallyResultResponse>; + }; + LCDQueryClient: typeof _152.LCDQueryClient; + MsgSubmitProposal: { + encode(message: _55.MsgSubmitProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgSubmitProposal; + fromPartial(object: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + initialDeposit?: { + denom?: string; + amount?: string; + }[]; + proposer?: string; + metadata?: string; + }): _55.MsgSubmitProposal; + }; + MsgSubmitProposalResponse: { + encode(message: _55.MsgSubmitProposalResponse, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgSubmitProposalResponse; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _55.MsgSubmitProposalResponse; + }; + MsgExecLegacyContent: { + encode(message: _55.MsgExecLegacyContent, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgExecLegacyContent; + fromPartial(object: { + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + authority?: string; + }): _55.MsgExecLegacyContent; + }; + MsgExecLegacyContentResponse: { + encode(_: _55.MsgExecLegacyContentResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgExecLegacyContentResponse; + fromPartial(_: {}): _55.MsgExecLegacyContentResponse; + }; + MsgVote: { + encode(message: _55.MsgVote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgVote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _53.VoteOption; + metadata?: string; + }): _55.MsgVote; + }; + MsgVoteResponse: { + encode(_: _55.MsgVoteResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgVoteResponse; + fromPartial(_: {}): _55.MsgVoteResponse; + }; + MsgVoteWeighted: { + encode(message: _55.MsgVoteWeighted, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgVoteWeighted; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _53.VoteOption; + weight?: string; + }[]; + metadata?: string; + }): _55.MsgVoteWeighted; + }; + MsgVoteWeightedResponse: { + encode(_: _55.MsgVoteWeightedResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgVoteWeightedResponse; + fromPartial(_: {}): _55.MsgVoteWeightedResponse; + }; + MsgDeposit: { + encode(message: _55.MsgDeposit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgDeposit; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _55.MsgDeposit; + }; + MsgDepositResponse: { + encode(_: _55.MsgDepositResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _55.MsgDepositResponse; + fromPartial(_: {}): _55.MsgDepositResponse; + }; + QueryProposalRequest: { + encode(message: _54.QueryProposalRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryProposalRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _54.QueryProposalRequest; + }; + QueryProposalResponse: { + encode(message: _54.QueryProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | 
import("protobufjs").Reader, length?: number): _54.QueryProposalResponse; + fromPartial(object: { + proposal?: { + id?: string | number | import("long").Long; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + status?: _53.ProposalStatus; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + metadata?: string; + }; + }): _54.QueryProposalResponse; + }; + QueryProposalsRequest: { + encode(message: _54.QueryProposalsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryProposalsRequest; + fromPartial(object: { + proposalStatus?: _53.ProposalStatus; + voter?: string; + depositor?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _54.QueryProposalsRequest; + }; + QueryProposalsResponse: { + encode(message: _54.QueryProposalsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryProposalsResponse; + fromPartial(object: { + proposals?: { + id?: string | number | import("long").Long; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + status?: _53.ProposalStatus; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + metadata?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _54.QueryProposalsResponse; + }; + QueryVoteRequest: { + encode(message: _54.QueryVoteRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryVoteRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + }): _54.QueryVoteRequest; + }; + QueryVoteResponse: { + encode(message: _54.QueryVoteResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryVoteResponse; + fromPartial(object: { + vote?: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _53.VoteOption; + weight?: string; + }[]; + metadata?: string; + }; + }): _54.QueryVoteResponse; + }; + QueryVotesRequest: { + encode(message: _54.QueryVotesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryVotesRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _54.QueryVotesRequest; + }; + QueryVotesResponse: { + encode(message: _54.QueryVotesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | 
import("protobufjs").Reader, length?: number): _54.QueryVotesResponse; + fromPartial(object: { + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _53.VoteOption; + weight?: string; + }[]; + metadata?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _54.QueryVotesResponse; + }; + QueryParamsRequest: { + encode(message: _54.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryParamsRequest; + fromPartial(object: { + paramsType?: string; + }): _54.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _54.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryParamsResponse; + fromPartial(object: { + votingParams?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + depositParams?: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + tallyParams?: { + quorum?: string; + threshold?: string; + vetoThreshold?: string; + }; + }): _54.QueryParamsResponse; + }; + QueryDepositRequest: { + encode(message: _54.QueryDepositRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryDepositRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + }): _54.QueryDepositRequest; + }; + QueryDepositResponse: { + encode(message: _54.QueryDepositResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryDepositResponse; + fromPartial(object: { + deposit?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }; + }): _54.QueryDepositResponse; + }; + QueryDepositsRequest: { + encode(message: _54.QueryDepositsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryDepositsRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _54.QueryDepositsRequest; + }; + QueryDepositsResponse: { + encode(message: _54.QueryDepositsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryDepositsResponse; + fromPartial(object: { + deposits?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _54.QueryDepositsResponse; + }; + QueryTallyResultRequest: { + encode(message: _54.QueryTallyResultRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_54.QueryTallyResultRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _54.QueryTallyResultRequest; + }; + QueryTallyResultResponse: { + encode(message: _54.QueryTallyResultResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _54.QueryTallyResultResponse; + fromPartial(object: { + tally?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + }): _54.QueryTallyResultResponse; + }; + voteOptionFromJSON(object: any): _53.VoteOption; + voteOptionToJSON(object: _53.VoteOption): string; + proposalStatusFromJSON(object: any): _53.ProposalStatus; + proposalStatusToJSON(object: _53.ProposalStatus): string; + VoteOption: typeof _53.VoteOption; + VoteOptionSDKType: typeof _53.VoteOption; + ProposalStatus: typeof _53.ProposalStatus; + ProposalStatusSDKType: typeof _53.ProposalStatus; + WeightedVoteOption: { + encode(message: _53.WeightedVoteOption, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.WeightedVoteOption; + fromPartial(object: { + option?: _53.VoteOption; + weight?: string; + }): _53.WeightedVoteOption; + }; + Deposit: { + encode(message: _53.Deposit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.Deposit; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _53.Deposit; + }; + Proposal: { + encode(message: _53.Proposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.Proposal; + fromPartial(object: { + id?: string | number | import("long").Long; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + status?: _53.ProposalStatus; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + metadata?: string; + }): _53.Proposal; + }; + TallyResult: { + encode(message: _53.TallyResult, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.TallyResult; + fromPartial(object: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }): _53.TallyResult; + }; + Vote: { + encode(message: _53.Vote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.Vote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _53.VoteOption; + weight?: string; + }[]; + metadata?: string; + }): _53.Vote; + }; + DepositParams: { + encode(message: _53.DepositParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.DepositParams; + fromPartial(object: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }): 
_53.DepositParams; + }; + VotingParams: { + encode(message: _53.VotingParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.VotingParams; + fromPartial(object: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }): _53.VotingParams; + }; + TallyParams: { + encode(message: _53.TallyParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _53.TallyParams; + fromPartial(object: { + quorum?: string; + threshold?: string; + vetoThreshold?: string; + }): _53.TallyParams; + }; + GenesisState: { + encode(message: _52.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _52.GenesisState; + fromPartial(object: { + startingProposalId?: string | number | import("long").Long; + deposits?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _53.VoteOption; + weight?: string; + }[]; + metadata?: string; + }[]; + proposals?: { + id?: string | number | import("long").Long; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + status?: _53.ProposalStatus; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + metadata?: string; + }[]; + depositParams?: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + votingParams?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + tallyParams?: { + quorum?: string; + threshold?: string; + vetoThreshold?: string; + }; + }): _52.GenesisState; + }; + }; + const v1beta1: { + MsgClientImpl: typeof _187.MsgClientImpl; + QueryClientImpl: typeof _171.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + proposal(request: _58.QueryProposalRequest): Promise<_58.QueryProposalResponse>; + proposals(request: _58.QueryProposalsRequest): Promise<_58.QueryProposalsResponse>; + vote(request: _58.QueryVoteRequest): Promise<_58.QueryVoteResponse>; + votes(request: _58.QueryVotesRequest): Promise<_58.QueryVotesResponse>; + params(request: _58.QueryParamsRequest): Promise<_58.QueryParamsResponse>; + deposit(request: _58.QueryDepositRequest): Promise<_58.QueryDepositResponse>; + deposits(request: _58.QueryDepositsRequest): Promise<_58.QueryDepositsResponse>; + tallyResult(request: _58.QueryTallyResultRequest): Promise<_58.QueryTallyResultResponse>; + }; + LCDQueryClient: typeof _153.LCDQueryClient; + MsgSubmitProposal: { + encode(message: _59.MsgSubmitProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgSubmitProposal; + fromPartial(object: { + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + initialDeposit?: { + denom?: string; + amount?: string; + }[]; + proposer?: string; + }): 
_59.MsgSubmitProposal; + }; + MsgSubmitProposalResponse: { + encode(message: _59.MsgSubmitProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgSubmitProposalResponse; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _59.MsgSubmitProposalResponse; + }; + MsgVote: { + encode(message: _59.MsgVote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgVote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _57.VoteOption; + }): _59.MsgVote; + }; + MsgVoteResponse: { + encode(_: _59.MsgVoteResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgVoteResponse; + fromPartial(_: {}): _59.MsgVoteResponse; + }; + MsgVoteWeighted: { + encode(message: _59.MsgVoteWeighted, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgVoteWeighted; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + options?: { + option?: _57.VoteOption; + weight?: string; + }[]; + }): _59.MsgVoteWeighted; + }; + MsgVoteWeightedResponse: { + encode(_: _59.MsgVoteWeightedResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgVoteWeightedResponse; + fromPartial(_: {}): _59.MsgVoteWeightedResponse; + }; + MsgDeposit: { + encode(message: _59.MsgDeposit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgDeposit; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _59.MsgDeposit; + }; + MsgDepositResponse: { + encode(_: _59.MsgDepositResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _59.MsgDepositResponse; + fromPartial(_: {}): _59.MsgDepositResponse; + }; + QueryProposalRequest: { + encode(message: _58.QueryProposalRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryProposalRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _58.QueryProposalRequest; + }; + QueryProposalResponse: { + encode(message: _58.QueryProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryProposalResponse; + fromPartial(object: { + proposal?: { + proposalId?: string | number | import("long").Long; + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + status?: _57.ProposalStatus; + finalTallyResult?: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + }; + }): _58.QueryProposalResponse; + }; + QueryProposalsRequest: { + encode(message: 
_58.QueryProposalsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryProposalsRequest; + fromPartial(object: { + proposalStatus?: _57.ProposalStatus; + voter?: string; + depositor?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _58.QueryProposalsRequest; + }; + QueryProposalsResponse: { + encode(message: _58.QueryProposalsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryProposalsResponse; + fromPartial(object: { + proposals?: { + proposalId?: string | number | import("long").Long; + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + status?: _57.ProposalStatus; + finalTallyResult?: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _58.QueryProposalsResponse; + }; + QueryVoteRequest: { + encode(message: _58.QueryVoteRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryVoteRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + }): _58.QueryVoteRequest; + }; + QueryVoteResponse: { + encode(message: _58.QueryVoteResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryVoteResponse; + fromPartial(object: { + vote?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _57.VoteOption; + options?: { + option?: _57.VoteOption; + weight?: string; + }[]; + }; + }): _58.QueryVoteResponse; + }; + QueryVotesRequest: { + encode(message: _58.QueryVotesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryVotesRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _58.QueryVotesRequest; + }; + QueryVotesResponse: { + encode(message: _58.QueryVotesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryVotesResponse; + fromPartial(object: { + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _57.VoteOption; + options?: { + option?: _57.VoteOption; + weight?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _58.QueryVotesResponse; + }; + QueryParamsRequest: { + encode(message: _58.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryParamsRequest; + fromPartial(object: { + 
paramsType?: string; + }): _58.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _58.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryParamsResponse; + fromPartial(object: { + votingParams?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + depositParams?: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + tallyParams?: { + quorum?: Uint8Array; + threshold?: Uint8Array; + vetoThreshold?: Uint8Array; + }; + }): _58.QueryParamsResponse; + }; + QueryDepositRequest: { + encode(message: _58.QueryDepositRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryDepositRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + }): _58.QueryDepositRequest; + }; + QueryDepositResponse: { + encode(message: _58.QueryDepositResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryDepositResponse; + fromPartial(object: { + deposit?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }; + }): _58.QueryDepositResponse; + }; + QueryDepositsRequest: { + encode(message: _58.QueryDepositsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryDepositsRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _58.QueryDepositsRequest; + }; + QueryDepositsResponse: { + encode(message: _58.QueryDepositsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryDepositsResponse; + fromPartial(object: { + deposits?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _58.QueryDepositsResponse; + }; + QueryTallyResultRequest: { + encode(message: _58.QueryTallyResultRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryTallyResultRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _58.QueryTallyResultRequest; + }; + QueryTallyResultResponse: { + encode(message: _58.QueryTallyResultResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _58.QueryTallyResultResponse; + fromPartial(object: { + tally?: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }; + }): _58.QueryTallyResultResponse; + }; + voteOptionFromJSON(object: any): _57.VoteOption; + voteOptionToJSON(object: _57.VoteOption): string; 
+ proposalStatusFromJSON(object: any): _57.ProposalStatus; + proposalStatusToJSON(object: _57.ProposalStatus): string; + VoteOption: typeof _57.VoteOption; + VoteOptionSDKType: typeof _57.VoteOption; + ProposalStatus: typeof _57.ProposalStatus; + ProposalStatusSDKType: typeof _57.ProposalStatus; + WeightedVoteOption: { + encode(message: _57.WeightedVoteOption, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.WeightedVoteOption; + fromPartial(object: { + option?: _57.VoteOption; + weight?: string; + }): _57.WeightedVoteOption; + }; + TextProposal: { + encode(message: _57.TextProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.TextProposal; + fromPartial(object: { + title?: string; + description?: string; + }): _57.TextProposal; + }; + Deposit: { + encode(message: _57.Deposit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.Deposit; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _57.Deposit; + }; + Proposal: { + encode(message: _57.Proposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.Proposal; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + status?: _57.ProposalStatus; + finalTallyResult?: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + }): _57.Proposal; + }; + TallyResult: { + encode(message: _57.TallyResult, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.TallyResult; + fromPartial(object: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }): _57.TallyResult; + }; + Vote: { + encode(message: _57.Vote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.Vote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _57.VoteOption; + options?: { + option?: _57.VoteOption; + weight?: string; + }[]; + }): _57.Vote; + }; + DepositParams: { + encode(message: _57.DepositParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.DepositParams; + fromPartial(object: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }): _57.DepositParams; + }; + VotingParams: { + encode(message: _57.VotingParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.VotingParams; + fromPartial(object: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }): _57.VotingParams; + }; + TallyParams: { + 
encode(message: _57.TallyParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _57.TallyParams; + fromPartial(object: { + quorum?: Uint8Array; + threshold?: Uint8Array; + vetoThreshold?: Uint8Array; + }): _57.TallyParams; + }; + GenesisState: { + encode(message: _56.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _56.GenesisState; + fromPartial(object: { + startingProposalId?: string | number | import("long").Long; + deposits?: { + proposalId?: string | number | import("long").Long; + depositor?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _57.VoteOption; + options?: { + option?: _57.VoteOption; + weight?: string; + }[]; + }[]; + proposals?: { + proposalId?: string | number | import("long").Long; + content?: { + typeUrl?: string; + value?: Uint8Array; + }; + status?: _57.ProposalStatus; + finalTallyResult?: { + yes?: string; + abstain?: string; + no?: string; + noWithVeto?: string; + }; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit?: { + denom?: string; + amount?: string; + }[]; + votingStartTime?: Date; + votingEndTime?: Date; + }[]; + depositParams?: { + minDeposit?: { + denom?: string; + amount?: string; + }[]; + maxDepositPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + votingParams?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + tallyParams?: { + quorum?: Uint8Array; + threshold?: Uint8Array; + vetoThreshold?: Uint8Array; + }; + }): _56.GenesisState; + }; + }; + } + namespace group { + const v1: { + MsgClientImpl: typeof _188.MsgClientImpl; + QueryClientImpl: typeof _172.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + groupInfo(request: _62.QueryGroupInfoRequest): Promise<_62.QueryGroupInfoResponse>; + groupPolicyInfo(request: _62.QueryGroupPolicyInfoRequest): Promise<_62.QueryGroupPolicyInfoResponse>; + groupMembers(request: _62.QueryGroupMembersRequest): Promise<_62.QueryGroupMembersResponse>; + groupsByAdmin(request: _62.QueryGroupsByAdminRequest): Promise<_62.QueryGroupsByAdminResponse>; + groupPoliciesByGroup(request: _62.QueryGroupPoliciesByGroupRequest): Promise<_62.QueryGroupPoliciesByGroupResponse>; + groupPoliciesByAdmin(request: _62.QueryGroupPoliciesByAdminRequest): Promise<_62.QueryGroupPoliciesByAdminResponse>; + proposal(request: _62.QueryProposalRequest): Promise<_62.QueryProposalResponse>; + proposalsByGroupPolicy(request: _62.QueryProposalsByGroupPolicyRequest): Promise<_62.QueryProposalsByGroupPolicyResponse>; + voteByProposalVoter(request: _62.QueryVoteByProposalVoterRequest): Promise<_62.QueryVoteByProposalVoterResponse>; + votesByProposal(request: _62.QueryVotesByProposalRequest): Promise<_62.QueryVotesByProposalResponse>; + votesByVoter(request: _62.QueryVotesByVoterRequest): Promise<_62.QueryVotesByVoterResponse>; + groupsByMember(request: _62.QueryGroupsByMemberRequest): Promise<_62.QueryGroupsByMemberResponse>; + tallyResult(request: _62.QueryTallyResultRequest): Promise<_62.QueryTallyResultResponse>; + }; + LCDQueryClient: typeof _154.LCDQueryClient; + voteOptionFromJSON(object: any): _64.VoteOption; + voteOptionToJSON(object: _64.VoteOption): string; + 
proposalStatusFromJSON(object: any): _64.ProposalStatus; + proposalStatusToJSON(object: _64.ProposalStatus): string; + proposalResultFromJSON(object: any): _64.ProposalResult; + proposalResultToJSON(object: _64.ProposalResult): string; + proposalExecutorResultFromJSON(object: any): _64.ProposalExecutorResult; + proposalExecutorResultToJSON(object: _64.ProposalExecutorResult): string; + VoteOption: typeof _64.VoteOption; + VoteOptionSDKType: typeof _64.VoteOption; + ProposalStatus: typeof _64.ProposalStatus; + ProposalStatusSDKType: typeof _64.ProposalStatus; + ProposalResult: typeof _64.ProposalResult; + ProposalResultSDKType: typeof _64.ProposalResult; + ProposalExecutorResult: typeof _64.ProposalExecutorResult; + ProposalExecutorResultSDKType: typeof _64.ProposalExecutorResult; + Member: { + encode(message: _64.Member, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.Member; + fromPartial(object: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }): _64.Member; + }; + Members: { + encode(message: _64.Members, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.Members; + fromPartial(object: { + members?: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }[]; + }): _64.Members; + }; + ThresholdDecisionPolicy: { + encode(message: _64.ThresholdDecisionPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.ThresholdDecisionPolicy; + fromPartial(object: { + threshold?: string; + windows?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + minExecutionPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + }): _64.ThresholdDecisionPolicy; + }; + PercentageDecisionPolicy: { + encode(message: _64.PercentageDecisionPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.PercentageDecisionPolicy; + fromPartial(object: { + percentage?: string; + windows?: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + minExecutionPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }; + }): _64.PercentageDecisionPolicy; + }; + DecisionPolicyWindows: { + encode(message: _64.DecisionPolicyWindows, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.DecisionPolicyWindows; + fromPartial(object: { + votingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + minExecutionPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + }): _64.DecisionPolicyWindows; + }; + GroupInfo: { + encode(message: _64.GroupInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.GroupInfo; + fromPartial(object: { + id?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + totalWeight?: string; + createdAt?: Date; + }): _64.GroupInfo; + }; + GroupMember: { + encode(message: _64.GroupMember, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.GroupMember; + fromPartial(object: { + groupId?: string | number | import("long").Long; + member?: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }; + }): _64.GroupMember; + }; + GroupPolicyInfo: { + encode(message: _64.GroupPolicyInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.GroupPolicyInfo; + fromPartial(object: { + address?: string; + groupId?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + createdAt?: Date; + }): _64.GroupPolicyInfo; + }; + Proposal: { + encode(message: _64.Proposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.Proposal; + fromPartial(object: { + id?: string | number | import("long").Long; + address?: string; + metadata?: string; + proposers?: string[]; + submitTime?: Date; + groupVersion?: string | number | import("long").Long; + groupPolicyVersion?: string | number | import("long").Long; + status?: _64.ProposalStatus; + result?: _64.ProposalResult; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + votingPeriodEnd?: Date; + executorResult?: _64.ProposalExecutorResult; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _64.Proposal; + }; + TallyResult: { + encode(message: _64.TallyResult, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.TallyResult; + fromPartial(object: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }): _64.TallyResult; + }; + Vote: { + encode(message: _64.Vote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _64.Vote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + submitTime?: Date; + }): _64.Vote; + }; + execFromJSON(object: any): _63.Exec; + execToJSON(object: _63.Exec): string; + Exec: typeof _63.Exec; + ExecSDKType: typeof _63.Exec; + MsgCreateGroup: { + encode(message: _63.MsgCreateGroup, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroup; + fromPartial(object: { + admin?: string; + members?: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }[]; + metadata?: string; + }): _63.MsgCreateGroup; + }; + MsgCreateGroupResponse: { + encode(message: _63.MsgCreateGroupResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroupResponse; + fromPartial(object: { + groupId?: string | number | import("long").Long; + }): _63.MsgCreateGroupResponse; + }; + MsgUpdateGroupMembers: { + encode(message: _63.MsgUpdateGroupMembers, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, 
length?: number): _63.MsgUpdateGroupMembers; + fromPartial(object: { + admin?: string; + groupId?: string | number | import("long").Long; + memberUpdates?: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }[]; + }): _63.MsgUpdateGroupMembers; + }; + MsgUpdateGroupMembersResponse: { + encode(_: _63.MsgUpdateGroupMembersResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupMembersResponse; + fromPartial(_: {}): _63.MsgUpdateGroupMembersResponse; + }; + MsgUpdateGroupAdmin: { + encode(message: _63.MsgUpdateGroupAdmin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupAdmin; + fromPartial(object: { + admin?: string; + groupId?: string | number | import("long").Long; + newAdmin?: string; + }): _63.MsgUpdateGroupAdmin; + }; + MsgUpdateGroupAdminResponse: { + encode(_: _63.MsgUpdateGroupAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupAdminResponse; + fromPartial(_: {}): _63.MsgUpdateGroupAdminResponse; + }; + MsgUpdateGroupMetadata: { + encode(message: _63.MsgUpdateGroupMetadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupMetadata; + fromPartial(object: { + admin?: string; + groupId?: string | number | import("long").Long; + metadata?: string; + }): _63.MsgUpdateGroupMetadata; + }; + MsgUpdateGroupMetadataResponse: { + encode(_: _63.MsgUpdateGroupMetadataResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupMetadataResponse; + fromPartial(_: {}): _63.MsgUpdateGroupMetadataResponse; + }; + MsgCreateGroupPolicy: { + encode(message: _63.MsgCreateGroupPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroupPolicy; + fromPartial(object: { + admin?: string; + groupId?: string | number | import("long").Long; + metadata?: string; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _63.MsgCreateGroupPolicy; + }; + MsgCreateGroupPolicyResponse: { + encode(message: _63.MsgCreateGroupPolicyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroupPolicyResponse; + fromPartial(object: { + address?: string; + }): _63.MsgCreateGroupPolicyResponse; + }; + MsgUpdateGroupPolicyAdmin: { + encode(message: _63.MsgUpdateGroupPolicyAdmin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyAdmin; + fromPartial(object: { + admin?: string; + address?: string; + newAdmin?: string; + }): _63.MsgUpdateGroupPolicyAdmin; + }; + MsgCreateGroupWithPolicy: { + encode(message: _63.MsgCreateGroupWithPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroupWithPolicy; + fromPartial(object: { + admin?: string; + members?: { + address?: string; + 
weight?: string; + metadata?: string; + addedAt?: Date; + }[]; + groupMetadata?: string; + groupPolicyMetadata?: string; + groupPolicyAsAdmin?: boolean; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _63.MsgCreateGroupWithPolicy; + }; + MsgCreateGroupWithPolicyResponse: { + encode(message: _63.MsgCreateGroupWithPolicyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgCreateGroupWithPolicyResponse; + fromPartial(object: { + groupId?: string | number | import("long").Long; + groupPolicyAddress?: string; + }): _63.MsgCreateGroupWithPolicyResponse; + }; + MsgUpdateGroupPolicyAdminResponse: { + encode(_: _63.MsgUpdateGroupPolicyAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyAdminResponse; + fromPartial(_: {}): _63.MsgUpdateGroupPolicyAdminResponse; + }; + MsgUpdateGroupPolicyDecisionPolicy: { + encode(message: _63.MsgUpdateGroupPolicyDecisionPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyDecisionPolicy; + fromPartial(object: { + admin?: string; + address?: string; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _63.MsgUpdateGroupPolicyDecisionPolicy; + }; + MsgUpdateGroupPolicyDecisionPolicyResponse: { + encode(_: _63.MsgUpdateGroupPolicyDecisionPolicyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyDecisionPolicyResponse; + fromPartial(_: {}): _63.MsgUpdateGroupPolicyDecisionPolicyResponse; + }; + MsgUpdateGroupPolicyMetadata: { + encode(message: _63.MsgUpdateGroupPolicyMetadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyMetadata; + fromPartial(object: { + admin?: string; + address?: string; + metadata?: string; + }): _63.MsgUpdateGroupPolicyMetadata; + }; + MsgUpdateGroupPolicyMetadataResponse: { + encode(_: _63.MsgUpdateGroupPolicyMetadataResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgUpdateGroupPolicyMetadataResponse; + fromPartial(_: {}): _63.MsgUpdateGroupPolicyMetadataResponse; + }; + MsgSubmitProposal: { + encode(message: _63.MsgSubmitProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgSubmitProposal; + fromPartial(object: { + address?: string; + proposers?: string[]; + metadata?: string; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + exec?: _63.Exec; + }): _63.MsgSubmitProposal; + }; + MsgSubmitProposalResponse: { + encode(message: _63.MsgSubmitProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgSubmitProposalResponse; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _63.MsgSubmitProposalResponse; + }; + MsgWithdrawProposal: { + encode(message: _63.MsgWithdrawProposal, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgWithdrawProposal; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + address?: string; + }): _63.MsgWithdrawProposal; + }; + MsgWithdrawProposalResponse: { + encode(_: _63.MsgWithdrawProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgWithdrawProposalResponse; + fromPartial(_: {}): _63.MsgWithdrawProposalResponse; + }; + MsgVote: { + encode(message: _63.MsgVote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgVote; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + exec?: _63.Exec; + }): _63.MsgVote; + }; + MsgVoteResponse: { + encode(_: _63.MsgVoteResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgVoteResponse; + fromPartial(_: {}): _63.MsgVoteResponse; + }; + MsgExec: { + encode(message: _63.MsgExec, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgExec; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + signer?: string; + }): _63.MsgExec; + }; + MsgExecResponse: { + encode(_: _63.MsgExecResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgExecResponse; + fromPartial(_: {}): _63.MsgExecResponse; + }; + MsgLeaveGroup: { + encode(message: _63.MsgLeaveGroup, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgLeaveGroup; + fromPartial(object: { + address?: string; + groupId?: string | number | import("long").Long; + }): _63.MsgLeaveGroup; + }; + MsgLeaveGroupResponse: { + encode(_: _63.MsgLeaveGroupResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _63.MsgLeaveGroupResponse; + fromPartial(_: {}): _63.MsgLeaveGroupResponse; + }; + QueryGroupInfoRequest: { + encode(message: _62.QueryGroupInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupInfoRequest; + fromPartial(object: { + groupId?: string | number | import("long").Long; + }): _62.QueryGroupInfoRequest; + }; + QueryGroupInfoResponse: { + encode(message: _62.QueryGroupInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupInfoResponse; + fromPartial(object: { + info?: { + id?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + totalWeight?: string; + createdAt?: Date; + }; + }): _62.QueryGroupInfoResponse; + }; + QueryGroupPolicyInfoRequest: { + encode(message: _62.QueryGroupPolicyInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_62.QueryGroupPolicyInfoRequest; + fromPartial(object: { + address?: string; + }): _62.QueryGroupPolicyInfoRequest; + }; + QueryGroupPolicyInfoResponse: { + encode(message: _62.QueryGroupPolicyInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupPolicyInfoResponse; + fromPartial(object: { + info?: { + address?: string; + groupId?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + createdAt?: Date; + }; + }): _62.QueryGroupPolicyInfoResponse; + }; + QueryGroupMembersRequest: { + encode(message: _62.QueryGroupMembersRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupMembersRequest; + fromPartial(object: { + groupId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryGroupMembersRequest; + }; + QueryGroupMembersResponse: { + encode(message: _62.QueryGroupMembersResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupMembersResponse; + fromPartial(object: { + members?: { + groupId?: string | number | import("long").Long; + member?: { + address?: string; + weight?: string; + metadata?: string; + addedAt?: Date; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryGroupMembersResponse; + }; + QueryGroupsByAdminRequest: { + encode(message: _62.QueryGroupsByAdminRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupsByAdminRequest; + fromPartial(object: { + admin?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryGroupsByAdminRequest; + }; + QueryGroupsByAdminResponse: { + encode(message: _62.QueryGroupsByAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupsByAdminResponse; + fromPartial(object: { + groups?: { + id?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + totalWeight?: string; + createdAt?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryGroupsByAdminResponse; + }; + QueryGroupPoliciesByGroupRequest: { + encode(message: _62.QueryGroupPoliciesByGroupRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupPoliciesByGroupRequest; + fromPartial(object: { + groupId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): 
_62.QueryGroupPoliciesByGroupRequest; + }; + QueryGroupPoliciesByGroupResponse: { + encode(message: _62.QueryGroupPoliciesByGroupResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupPoliciesByGroupResponse; + fromPartial(object: { + groupPolicies?: { + address?: string; + groupId?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + createdAt?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryGroupPoliciesByGroupResponse; + }; + QueryGroupPoliciesByAdminRequest: { + encode(message: _62.QueryGroupPoliciesByAdminRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupPoliciesByAdminRequest; + fromPartial(object: { + admin?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryGroupPoliciesByAdminRequest; + }; + QueryGroupPoliciesByAdminResponse: { + encode(message: _62.QueryGroupPoliciesByAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupPoliciesByAdminResponse; + fromPartial(object: { + groupPolicies?: { + address?: string; + groupId?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + createdAt?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryGroupPoliciesByAdminResponse; + }; + QueryProposalRequest: { + encode(message: _62.QueryProposalRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryProposalRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _62.QueryProposalRequest; + }; + QueryProposalResponse: { + encode(message: _62.QueryProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryProposalResponse; + fromPartial(object: { + proposal?: { + id?: string | number | import("long").Long; + address?: string; + metadata?: string; + proposers?: string[]; + submitTime?: Date; + groupVersion?: string | number | import("long").Long; + groupPolicyVersion?: string | number | import("long").Long; + status?: _64.ProposalStatus; + result?: _64.ProposalResult; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + votingPeriodEnd?: Date; + executorResult?: _64.ProposalExecutorResult; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + }): _62.QueryProposalResponse; + }; + QueryProposalsByGroupPolicyRequest: { + encode(message: _62.QueryProposalsByGroupPolicyRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_62.QueryProposalsByGroupPolicyRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryProposalsByGroupPolicyRequest; + }; + QueryProposalsByGroupPolicyResponse: { + encode(message: _62.QueryProposalsByGroupPolicyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryProposalsByGroupPolicyResponse; + fromPartial(object: { + proposals?: { + id?: string | number | import("long").Long; + address?: string; + metadata?: string; + proposers?: string[]; + submitTime?: Date; + groupVersion?: string | number | import("long").Long; + groupPolicyVersion?: string | number | import("long").Long; + status?: _64.ProposalStatus; + result?: _64.ProposalResult; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + votingPeriodEnd?: Date; + executorResult?: _64.ProposalExecutorResult; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryProposalsByGroupPolicyResponse; + }; + QueryVoteByProposalVoterRequest: { + encode(message: _62.QueryVoteByProposalVoterRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVoteByProposalVoterRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + voter?: string; + }): _62.QueryVoteByProposalVoterRequest; + }; + QueryVoteByProposalVoterResponse: { + encode(message: _62.QueryVoteByProposalVoterResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVoteByProposalVoterResponse; + fromPartial(object: { + vote?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + submitTime?: Date; + }; + }): _62.QueryVoteByProposalVoterResponse; + }; + QueryVotesByProposalRequest: { + encode(message: _62.QueryVotesByProposalRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVotesByProposalRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryVotesByProposalRequest; + }; + QueryVotesByProposalResponse: { + encode(message: _62.QueryVotesByProposalResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVotesByProposalResponse; + fromPartial(object: { + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + submitTime?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryVotesByProposalResponse; + }; + QueryVotesByVoterRequest: { + encode(message: _62.QueryVotesByVoterRequest, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVotesByVoterRequest; + fromPartial(object: { + voter?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryVotesByVoterRequest; + }; + QueryVotesByVoterResponse: { + encode(message: _62.QueryVotesByVoterResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryVotesByVoterResponse; + fromPartial(object: { + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + submitTime?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryVotesByVoterResponse; + }; + QueryGroupsByMemberRequest: { + encode(message: _62.QueryGroupsByMemberRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupsByMemberRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _62.QueryGroupsByMemberRequest; + }; + QueryGroupsByMemberResponse: { + encode(message: _62.QueryGroupsByMemberResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryGroupsByMemberResponse; + fromPartial(object: { + groups?: { + id?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + totalWeight?: string; + createdAt?: Date; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _62.QueryGroupsByMemberResponse; + }; + QueryTallyResultRequest: { + encode(message: _62.QueryTallyResultRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryTallyResultRequest; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _62.QueryTallyResultRequest; + }; + QueryTallyResultResponse: { + encode(message: _62.QueryTallyResultResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _62.QueryTallyResultResponse; + fromPartial(object: { + tally?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + }): _62.QueryTallyResultResponse; + }; + GenesisState: { + encode(message: _61.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _61.GenesisState; + fromPartial(object: { + groupSeq?: string | number | import("long").Long; + groups?: { + id?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + totalWeight?: string; + createdAt?: Date; + }[]; + groupMembers?: { + groupId?: string | number | import("long").Long; + member?: { + address?: string; 
+ weight?: string; + metadata?: string; + addedAt?: Date; + }; + }[]; + groupPolicySeq?: string | number | import("long").Long; + groupPolicies?: { + address?: string; + groupId?: string | number | import("long").Long; + admin?: string; + metadata?: string; + version?: string | number | import("long").Long; + decisionPolicy?: { + typeUrl?: string; + value?: Uint8Array; + }; + createdAt?: Date; + }[]; + proposalSeq?: string | number | import("long").Long; + proposals?: { + id?: string | number | import("long").Long; + address?: string; + metadata?: string; + proposers?: string[]; + submitTime?: Date; + groupVersion?: string | number | import("long").Long; + groupPolicyVersion?: string | number | import("long").Long; + status?: _64.ProposalStatus; + result?: _64.ProposalResult; + finalTallyResult?: { + yesCount?: string; + abstainCount?: string; + noCount?: string; + noWithVetoCount?: string; + }; + votingPeriodEnd?: Date; + executorResult?: _64.ProposalExecutorResult; + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }[]; + votes?: { + proposalId?: string | number | import("long").Long; + voter?: string; + option?: _64.VoteOption; + metadata?: string; + submitTime?: Date; + }[]; + }): _61.GenesisState; + }; + EventCreateGroup: { + encode(message: _60.EventCreateGroup, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventCreateGroup; + fromPartial(object: { + groupId?: string | number | import("long").Long; + }): _60.EventCreateGroup; + }; + EventUpdateGroup: { + encode(message: _60.EventUpdateGroup, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventUpdateGroup; + fromPartial(object: { + groupId?: string | number | import("long").Long; + }): _60.EventUpdateGroup; + }; + EventCreateGroupPolicy: { + encode(message: _60.EventCreateGroupPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventCreateGroupPolicy; + fromPartial(object: { + address?: string; + }): _60.EventCreateGroupPolicy; + }; + EventUpdateGroupPolicy: { + encode(message: _60.EventUpdateGroupPolicy, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventUpdateGroupPolicy; + fromPartial(object: { + address?: string; + }): _60.EventUpdateGroupPolicy; + }; + EventSubmitProposal: { + encode(message: _60.EventSubmitProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventSubmitProposal; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _60.EventSubmitProposal; + }; + EventWithdrawProposal: { + encode(message: _60.EventWithdrawProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventWithdrawProposal; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + }): _60.EventWithdrawProposal; + }; + EventVote: { + encode(message: _60.EventVote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventVote; + fromPartial(object: { + proposalId?: string | number | 
import("long").Long; + }): _60.EventVote; + }; + EventExec: { + encode(message: _60.EventExec, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventExec; + fromPartial(object: { + proposalId?: string | number | import("long").Long; + result?: _64.ProposalExecutorResult; + }): _60.EventExec; + }; + EventLeaveGroup: { + encode(message: _60.EventLeaveGroup, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _60.EventLeaveGroup; + fromPartial(object: { + groupId?: string | number | import("long").Long; + address?: string; + }): _60.EventLeaveGroup; + }; + }; + } + namespace mint { + const v1beta1: { + QueryClientImpl: typeof _173.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + params(request?: _67.QueryParamsRequest): Promise<_67.QueryParamsResponse>; + inflation(request?: _67.QueryInflationRequest): Promise<_67.QueryInflationResponse>; + annualProvisions(request?: _67.QueryAnnualProvisionsRequest): Promise<_67.QueryAnnualProvisionsResponse>; + }; + LCDQueryClient: typeof _155.LCDQueryClient; + QueryParamsRequest: { + encode(_: _67.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryParamsRequest; + fromPartial(_: {}): _67.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _67.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryParamsResponse; + fromPartial(object: { + params?: { + mintDenom?: string; + inflationRateChange?: string; + inflationMax?: string; + inflationMin?: string; + goalBonded?: string; + blocksPerYear?: string | number | import("long").Long; + }; + }): _67.QueryParamsResponse; + }; + QueryInflationRequest: { + encode(_: _67.QueryInflationRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryInflationRequest; + fromPartial(_: {}): _67.QueryInflationRequest; + }; + QueryInflationResponse: { + encode(message: _67.QueryInflationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryInflationResponse; + fromPartial(object: { + inflation?: Uint8Array; + }): _67.QueryInflationResponse; + }; + QueryAnnualProvisionsRequest: { + encode(_: _67.QueryAnnualProvisionsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryAnnualProvisionsRequest; + fromPartial(_: {}): _67.QueryAnnualProvisionsRequest; + }; + QueryAnnualProvisionsResponse: { + encode(message: _67.QueryAnnualProvisionsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _67.QueryAnnualProvisionsResponse; + fromPartial(object: { + annualProvisions?: Uint8Array; + }): _67.QueryAnnualProvisionsResponse; + }; + Minter: { + encode(message: _66.Minter, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _66.Minter; + 
fromPartial(object: { + inflation?: string; + annualProvisions?: string; + }): _66.Minter; + }; + Params: { + encode(message: _66.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _66.Params; + fromPartial(object: { + mintDenom?: string; + inflationRateChange?: string; + inflationMax?: string; + inflationMin?: string; + goalBonded?: string; + blocksPerYear?: string | number | import("long").Long; + }): _66.Params; + }; + GenesisState: { + encode(message: _65.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _65.GenesisState; + fromPartial(object: { + minter?: { + inflation?: string; + annualProvisions?: string; + }; + params?: { + mintDenom?: string; + inflationRateChange?: string; + inflationMax?: string; + inflationMin?: string; + goalBonded?: string; + blocksPerYear?: string | number | import("long").Long; + }; + }): _65.GenesisState; + }; + }; + } + namespace msg { + const v1: {}; + } + namespace nft { + const v1beta1: { + MsgClientImpl: typeof _189.MsgClientImpl; + QueryClientImpl: typeof _174.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + balance(request: _72.QueryBalanceRequest): Promise<_72.QueryBalanceResponse>; + owner(request: _72.QueryOwnerRequest): Promise<_72.QueryOwnerResponse>; + supply(request: _72.QuerySupplyRequest): Promise<_72.QuerySupplyResponse>; + nFTs(request: _72.QueryNFTsRequest): Promise<_72.QueryNFTsResponse>; + nFT(request: _72.QueryNFTRequest): Promise<_72.QueryNFTResponse>; + class(request: _72.QueryClassRequest): Promise<_72.QueryClassResponse>; + classes(request?: _72.QueryClassesRequest): Promise<_72.QueryClassesResponse>; + }; + LCDQueryClient: typeof _156.LCDQueryClient; + MsgSend: { + encode(message: _73.MsgSend, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _73.MsgSend; + fromPartial(object: { + classId?: string; + id?: string; + sender?: string; + receiver?: string; + }): _73.MsgSend; + }; + MsgSendResponse: { + encode(_: _73.MsgSendResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _73.MsgSendResponse; + fromPartial(_: {}): _73.MsgSendResponse; + }; + QueryBalanceRequest: { + encode(message: _72.QueryBalanceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryBalanceRequest; + fromPartial(object: { + classId?: string; + owner?: string; + }): _72.QueryBalanceRequest; + }; + QueryBalanceResponse: { + encode(message: _72.QueryBalanceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryBalanceResponse; + fromPartial(object: { + amount?: string | number | import("long").Long; + }): _72.QueryBalanceResponse; + }; + QueryOwnerRequest: { + encode(message: _72.QueryOwnerRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryOwnerRequest; + fromPartial(object: { + classId?: string; + id?: string; + }): _72.QueryOwnerRequest; + }; + QueryOwnerResponse: { + encode(message: 
_72.QueryOwnerResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryOwnerResponse; + fromPartial(object: { + owner?: string; + }): _72.QueryOwnerResponse; + }; + QuerySupplyRequest: { + encode(message: _72.QuerySupplyRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QuerySupplyRequest; + fromPartial(object: { + classId?: string; + }): _72.QuerySupplyRequest; + }; + QuerySupplyResponse: { + encode(message: _72.QuerySupplyResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QuerySupplyResponse; + fromPartial(object: { + amount?: string | number | import("long").Long; + }): _72.QuerySupplyResponse; + }; + QueryNFTsRequest: { + encode(message: _72.QueryNFTsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryNFTsRequest; + fromPartial(object: { + classId?: string; + owner?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _72.QueryNFTsRequest; + }; + QueryNFTsResponse: { + encode(message: _72.QueryNFTsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryNFTsResponse; + fromPartial(object: { + nfts?: { + classId?: string; + id?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _72.QueryNFTsResponse; + }; + QueryNFTRequest: { + encode(message: _72.QueryNFTRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryNFTRequest; + fromPartial(object: { + classId?: string; + id?: string; + }): _72.QueryNFTRequest; + }; + QueryNFTResponse: { + encode(message: _72.QueryNFTResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryNFTResponse; + fromPartial(object: { + nft?: { + classId?: string; + id?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _72.QueryNFTResponse; + }; + QueryClassRequest: { + encode(message: _72.QueryClassRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryClassRequest; + fromPartial(object: { + classId?: string; + }): _72.QueryClassRequest; + }; + QueryClassResponse: { + encode(message: _72.QueryClassResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryClassResponse; + fromPartial(object: { + class?: { + id?: string; + name?: string; + symbol?: string; + description?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _72.QueryClassResponse; + }; + QueryClassesRequest: { + encode(message: 
_72.QueryClassesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryClassesRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _72.QueryClassesRequest; + }; + QueryClassesResponse: { + encode(message: _72.QueryClassesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _72.QueryClassesResponse; + fromPartial(object: { + classes?: { + id?: string; + name?: string; + symbol?: string; + description?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _72.QueryClassesResponse; + }; + Class: { + encode(message: _71.Class, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _71.Class; + fromPartial(object: { + id?: string; + name?: string; + symbol?: string; + description?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _71.Class; + }; + NFT: { + encode(message: _71.NFT, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _71.NFT; + fromPartial(object: { + classId?: string; + id?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _71.NFT; + }; + GenesisState: { + encode(message: _70.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _70.GenesisState; + fromPartial(object: { + classes?: { + id?: string; + name?: string; + symbol?: string; + description?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + entries?: { + owner?: string; + nfts?: { + classId?: string; + id?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }[]; + }): _70.GenesisState; + }; + Entry: { + encode(message: _70.Entry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _70.Entry; + fromPartial(object: { + owner?: string; + nfts?: { + classId?: string; + id?: string; + uri?: string; + uriHash?: string; + data?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _70.Entry; + }; + EventSend: { + encode(message: _69.EventSend, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _69.EventSend; + fromPartial(object: { + classId?: string; + id?: string; + sender?: string; + receiver?: string; + }): _69.EventSend; + }; + EventMint: { + encode(message: _69.EventMint, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _69.EventMint; + fromPartial(object: { + classId?: string; + id?: string; + owner?: string; + }): _69.EventMint; + }; + EventBurn: { + encode(message: _69.EventBurn, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _69.EventBurn; + fromPartial(object: { + classId?: string; + id?: string; + owner?: string; + }): _69.EventBurn; + }; + }; + } + namespace orm { + const v1: { + TableDescriptor: { + encode(message: _74.TableDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _74.TableDescriptor; + fromPartial(object: { + primaryKey?: { + fields?: string; + autoIncrement?: boolean; + }; + index?: { + fields?: string; + id?: number; + unique?: boolean; + }[]; + id?: number; + }): _74.TableDescriptor; + }; + PrimaryKeyDescriptor: { + encode(message: _74.PrimaryKeyDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _74.PrimaryKeyDescriptor; + fromPartial(object: { + fields?: string; + autoIncrement?: boolean; + }): _74.PrimaryKeyDescriptor; + }; + SecondaryIndexDescriptor: { + encode(message: _74.SecondaryIndexDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _74.SecondaryIndexDescriptor; + fromPartial(object: { + fields?: string; + id?: number; + unique?: boolean; + }): _74.SecondaryIndexDescriptor; + }; + SingletonDescriptor: { + encode(message: _74.SingletonDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _74.SingletonDescriptor; + fromPartial(object: { + id?: number; + }): _74.SingletonDescriptor; + }; + }; + const v1alpha1: { + storageTypeFromJSON(object: any): _75.StorageType; + storageTypeToJSON(object: _75.StorageType): string; + StorageType: typeof _75.StorageType; + StorageTypeSDKType: typeof _75.StorageType; + ModuleSchemaDescriptor: { + encode(message: _75.ModuleSchemaDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _75.ModuleSchemaDescriptor; + fromPartial(object: { + schemaFile?: { + id?: number; + protoFileName?: string; + storageType?: _75.StorageType; + }[]; + prefix?: Uint8Array; + }): _75.ModuleSchemaDescriptor; + }; + ModuleSchemaDescriptor_FileEntry: { + encode(message: _75.ModuleSchemaDescriptor_FileEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _75.ModuleSchemaDescriptor_FileEntry; + fromPartial(object: { + id?: number; + protoFileName?: string; + storageType?: _75.StorageType; + }): _75.ModuleSchemaDescriptor_FileEntry; + }; + }; + } + namespace params { + const v1beta1: { + QueryClientImpl: typeof _175.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + params(request: _77.QueryParamsRequest): Promise<_77.QueryParamsResponse>; + subspaces(request?: _77.QuerySubspacesRequest): Promise<_77.QuerySubspacesResponse>; + }; + LCDQueryClient: typeof _157.LCDQueryClient; + QueryParamsRequest: { + encode(message: _77.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _77.QueryParamsRequest; + fromPartial(object: { + subspace?: string; + key?: string; + }): _77.QueryParamsRequest; + }; + 
QueryParamsResponse: { + encode(message: _77.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _77.QueryParamsResponse; + fromPartial(object: { + param?: { + subspace?: string; + key?: string; + value?: string; + }; + }): _77.QueryParamsResponse; + }; + QuerySubspacesRequest: { + encode(_: _77.QuerySubspacesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _77.QuerySubspacesRequest; + fromPartial(_: {}): _77.QuerySubspacesRequest; + }; + QuerySubspacesResponse: { + encode(message: _77.QuerySubspacesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _77.QuerySubspacesResponse; + fromPartial(object: { + subspaces?: { + subspace?: string; + keys?: string[]; + }[]; + }): _77.QuerySubspacesResponse; + }; + Subspace: { + encode(message: _77.Subspace, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _77.Subspace; + fromPartial(object: { + subspace?: string; + keys?: string[]; + }): _77.Subspace; + }; + ParameterChangeProposal: { + encode(message: _76.ParameterChangeProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _76.ParameterChangeProposal; + fromPartial(object: { + title?: string; + description?: string; + changes?: { + subspace?: string; + key?: string; + value?: string; + }[]; + }): _76.ParameterChangeProposal; + }; + ParamChange: { + encode(message: _76.ParamChange, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _76.ParamChange; + fromPartial(object: { + subspace?: string; + key?: string; + value?: string; + }): _76.ParamChange; + }; + }; + } + namespace slashing { + const v1beta1: { + MsgClientImpl: typeof _190.MsgClientImpl; + QueryClientImpl: typeof _176.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + params(request?: _79.QueryParamsRequest): Promise<_79.QueryParamsResponse>; + signingInfo(request: _79.QuerySigningInfoRequest): Promise<_79.QuerySigningInfoResponse>; + signingInfos(request?: _79.QuerySigningInfosRequest): Promise<_79.QuerySigningInfosResponse>; + }; + LCDQueryClient: typeof _158.LCDQueryClient; + MsgUnjail: { + encode(message: _81.MsgUnjail, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _81.MsgUnjail; + fromPartial(object: { + validatorAddr?: string; + }): _81.MsgUnjail; + }; + MsgUnjailResponse: { + encode(_: _81.MsgUnjailResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _81.MsgUnjailResponse; + fromPartial(_: {}): _81.MsgUnjailResponse; + }; + ValidatorSigningInfo: { + encode(message: _80.ValidatorSigningInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _80.ValidatorSigningInfo; + fromPartial(object: { + address?: string; + startHeight?: string | number | import("long").Long; + indexOffset?: string | number | 
import("long").Long; + jailedUntil?: Date; + tombstoned?: boolean; + missedBlocksCounter?: string | number | import("long").Long; + }): _80.ValidatorSigningInfo; + }; + Params: { + encode(message: _80.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _80.Params; + fromPartial(object: { + signedBlocksWindow?: string | number | import("long").Long; + minSignedPerWindow?: Uint8Array; + downtimeJailDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + slashFractionDoubleSign?: Uint8Array; + slashFractionDowntime?: Uint8Array; + }): _80.Params; + }; + QueryParamsRequest: { + encode(_: _79.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QueryParamsRequest; + fromPartial(_: {}): _79.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _79.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QueryParamsResponse; + fromPartial(object: { + params?: { + signedBlocksWindow?: string | number | import("long").Long; + minSignedPerWindow?: Uint8Array; + downtimeJailDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + slashFractionDoubleSign?: Uint8Array; + slashFractionDowntime?: Uint8Array; + }; + }): _79.QueryParamsResponse; + }; + QuerySigningInfoRequest: { + encode(message: _79.QuerySigningInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QuerySigningInfoRequest; + fromPartial(object: { + consAddress?: string; + }): _79.QuerySigningInfoRequest; + }; + QuerySigningInfoResponse: { + encode(message: _79.QuerySigningInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QuerySigningInfoResponse; + fromPartial(object: { + valSigningInfo?: { + address?: string; + startHeight?: string | number | import("long").Long; + indexOffset?: string | number | import("long").Long; + jailedUntil?: Date; + tombstoned?: boolean; + missedBlocksCounter?: string | number | import("long").Long; + }; + }): _79.QuerySigningInfoResponse; + }; + QuerySigningInfosRequest: { + encode(message: _79.QuerySigningInfosRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QuerySigningInfosRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _79.QuerySigningInfosRequest; + }; + QuerySigningInfosResponse: { + encode(message: _79.QuerySigningInfosResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _79.QuerySigningInfosResponse; + fromPartial(object: { + info?: { + address?: string; + startHeight?: string | number | import("long").Long; + indexOffset?: string | number | import("long").Long; + jailedUntil?: Date; + tombstoned?: boolean; + missedBlocksCounter?: string | number | import("long").Long; + }[]; + pagination?: { + nextKey?: 
Uint8Array; + total?: string | number | import("long").Long; + }; + }): _79.QuerySigningInfosResponse; + }; + GenesisState: { + encode(message: _78.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _78.GenesisState; + fromPartial(object: { + params?: { + signedBlocksWindow?: string | number | import("long").Long; + minSignedPerWindow?: Uint8Array; + downtimeJailDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + slashFractionDoubleSign?: Uint8Array; + slashFractionDowntime?: Uint8Array; + }; + signingInfos?: { + address?: string; + validatorSigningInfo?: { + address?: string; + startHeight?: string | number | import("long").Long; + indexOffset?: string | number | import("long").Long; + jailedUntil?: Date; + tombstoned?: boolean; + missedBlocksCounter?: string | number | import("long").Long; + }; + }[]; + missedBlocks?: { + address?: string; + missedBlocks?: { + index?: string | number | import("long").Long; + missed?: boolean; + }[]; + }[]; + }): _78.GenesisState; + }; + SigningInfo: { + encode(message: _78.SigningInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _78.SigningInfo; + fromPartial(object: { + address?: string; + validatorSigningInfo?: { + address?: string; + startHeight?: string | number | import("long").Long; + indexOffset?: string | number | import("long").Long; + jailedUntil?: Date; + tombstoned?: boolean; + missedBlocksCounter?: string | number | import("long").Long; + }; + }): _78.SigningInfo; + }; + ValidatorMissedBlocks: { + encode(message: _78.ValidatorMissedBlocks, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _78.ValidatorMissedBlocks; + fromPartial(object: { + address?: string; + missedBlocks?: { + index?: string | number | import("long").Long; + missed?: boolean; + }[]; + }): _78.ValidatorMissedBlocks; + }; + MissedBlock: { + encode(message: _78.MissedBlock, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _78.MissedBlock; + fromPartial(object: { + index?: string | number | import("long").Long; + missed?: boolean; + }): _78.MissedBlock; + }; + }; + } + namespace staking { + const v1beta1: { + MsgClientImpl: typeof _191.MsgClientImpl; + QueryClientImpl: typeof _177.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + validators(request: _84.QueryValidatorsRequest): Promise<_84.QueryValidatorsResponse>; + validator(request: _84.QueryValidatorRequest): Promise<_84.QueryValidatorResponse>; + validatorDelegations(request: _84.QueryValidatorDelegationsRequest): Promise<_84.QueryValidatorDelegationsResponse>; + validatorUnbondingDelegations(request: _84.QueryValidatorUnbondingDelegationsRequest): Promise<_84.QueryValidatorUnbondingDelegationsResponse>; + delegation(request: _84.QueryDelegationRequest): Promise<_84.QueryDelegationResponse>; + unbondingDelegation(request: _84.QueryUnbondingDelegationRequest): Promise<_84.QueryUnbondingDelegationResponse>; + delegatorDelegations(request: _84.QueryDelegatorDelegationsRequest): Promise<_84.QueryDelegatorDelegationsResponse>; + delegatorUnbondingDelegations(request: _84.QueryDelegatorUnbondingDelegationsRequest): 
Promise<_84.QueryDelegatorUnbondingDelegationsResponse>; + redelegations(request: _84.QueryRedelegationsRequest): Promise<_84.QueryRedelegationsResponse>; + delegatorValidators(request: _84.QueryDelegatorValidatorsRequest): Promise<_84.QueryDelegatorValidatorsResponse>; + delegatorValidator(request: _84.QueryDelegatorValidatorRequest): Promise<_84.QueryDelegatorValidatorResponse>; + historicalInfo(request: _84.QueryHistoricalInfoRequest): Promise<_84.QueryHistoricalInfoResponse>; + pool(request?: _84.QueryPoolRequest): Promise<_84.QueryPoolResponse>; + params(request?: _84.QueryParamsRequest): Promise<_84.QueryParamsResponse>; + }; + LCDQueryClient: typeof _159.LCDQueryClient; + MsgCreateValidator: { + encode(message: _86.MsgCreateValidator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgCreateValidator; + fromPartial(object: { + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + commission?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + minSelfDelegation?: string; + delegatorAddress?: string; + validatorAddress?: string; + pubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + value?: { + denom?: string; + amount?: string; + }; + }): _86.MsgCreateValidator; + }; + MsgCreateValidatorResponse: { + encode(_: _86.MsgCreateValidatorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgCreateValidatorResponse; + fromPartial(_: {}): _86.MsgCreateValidatorResponse; + }; + MsgEditValidator: { + encode(message: _86.MsgEditValidator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgEditValidator; + fromPartial(object: { + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + validatorAddress?: string; + commissionRate?: string; + minSelfDelegation?: string; + }): _86.MsgEditValidator; + }; + MsgEditValidatorResponse: { + encode(_: _86.MsgEditValidatorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgEditValidatorResponse; + fromPartial(_: {}): _86.MsgEditValidatorResponse; + }; + MsgDelegate: { + encode(message: _86.MsgDelegate, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgDelegate; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + amount?: { + denom?: string; + amount?: string; + }; + }): _86.MsgDelegate; + }; + MsgDelegateResponse: { + encode(_: _86.MsgDelegateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgDelegateResponse; + fromPartial(_: {}): _86.MsgDelegateResponse; + }; + MsgBeginRedelegate: { + encode(message: _86.MsgBeginRedelegate, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgBeginRedelegate; + fromPartial(object: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + amount?: { + denom?: 
string; + amount?: string; + }; + }): _86.MsgBeginRedelegate; + }; + MsgBeginRedelegateResponse: { + encode(message: _86.MsgBeginRedelegateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgBeginRedelegateResponse; + fromPartial(object: { + completionTime?: Date; + }): _86.MsgBeginRedelegateResponse; + }; + MsgUndelegate: { + encode(message: _86.MsgUndelegate, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgUndelegate; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + amount?: { + denom?: string; + amount?: string; + }; + }): _86.MsgUndelegate; + }; + MsgUndelegateResponse: { + encode(message: _86.MsgUndelegateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _86.MsgUndelegateResponse; + fromPartial(object: { + completionTime?: Date; + }): _86.MsgUndelegateResponse; + }; + bondStatusFromJSON(object: any): _85.BondStatus; + bondStatusToJSON(object: _85.BondStatus): string; + BondStatus: typeof _85.BondStatus; + BondStatusSDKType: typeof _85.BondStatus; + HistoricalInfo: { + encode(message: _85.HistoricalInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.HistoricalInfo; + fromPartial(object: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + valset?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }[]; + }): _85.HistoricalInfo; + }; + CommissionRates: { + encode(message: _85.CommissionRates, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.CommissionRates; + fromPartial(object: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }): _85.CommissionRates; + }; + Commission: { + encode(message: _85.Commission, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Commission; + fromPartial(object: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }): _85.Commission; + }; + Description: { + encode(message: _85.Description, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Description; + fromPartial(object: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }): _85.Description; + }; + Validator: { + encode(message: _85.Validator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Validator; + fromPartial(object: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }): _85.Validator; + }; + ValAddresses: { + encode(message: _85.ValAddresses, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.ValAddresses; + fromPartial(object: { + addresses?: string[]; + }): _85.ValAddresses; + }; + DVPair: { + encode(message: _85.DVPair, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.DVPair; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + }): _85.DVPair; + }; + DVPairs: { + encode(message: _85.DVPairs, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.DVPairs; + fromPartial(object: { + pairs?: { + delegatorAddress?: string; + validatorAddress?: string; + }[]; + }): _85.DVPairs; + }; + DVVTriplet: { + encode(message: _85.DVVTriplet, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.DVVTriplet; + fromPartial(object: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + }): _85.DVVTriplet; + }; + DVVTriplets: { + encode(message: _85.DVVTriplets, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.DVVTriplets; + fromPartial(object: { + triplets?: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + }[]; + }): _85.DVVTriplets; + }; + Delegation: { + encode(message: _85.Delegation, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Delegation; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }): _85.Delegation; + }; + UnbondingDelegation: { + encode(message: _85.UnbondingDelegation, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.UnbondingDelegation; + fromPartial(object: { + delegatorAddress?: string; + validatorAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + 
initialBalance?: string; + balance?: string; + }[]; + }): _85.UnbondingDelegation; + }; + UnbondingDelegationEntry: { + encode(message: _85.UnbondingDelegationEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.UnbondingDelegationEntry; + fromPartial(object: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + balance?: string; + }): _85.UnbondingDelegationEntry; + }; + RedelegationEntry: { + encode(message: _85.RedelegationEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.RedelegationEntry; + fromPartial(object: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }): _85.RedelegationEntry; + }; + Redelegation: { + encode(message: _85.Redelegation, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Redelegation; + fromPartial(object: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }[]; + }): _85.Redelegation; + }; + Params: { + encode(message: _85.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Params; + fromPartial(object: { + unbondingTime?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxValidators?: number; + maxEntries?: number; + historicalEntries?: number; + bondDenom?: string; + minCommissionRate?: string; + }): _85.Params; + }; + DelegationResponse: { + encode(message: _85.DelegationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.DelegationResponse; + fromPartial(object: { + delegation?: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }; + balance?: { + denom?: string; + amount?: string; + }; + }): _85.DelegationResponse; + }; + RedelegationEntryResponse: { + encode(message: _85.RedelegationEntryResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.RedelegationEntryResponse; + fromPartial(object: { + redelegationEntry?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }; + balance?: string; + }): _85.RedelegationEntryResponse; + }; + RedelegationResponse: { + encode(message: _85.RedelegationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.RedelegationResponse; + fromPartial(object: { + redelegation?: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }[]; + }; + entries?: { + redelegationEntry?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + 
initialBalance?: string; + sharesDst?: string; + }; + balance?: string; + }[]; + }): _85.RedelegationResponse; + }; + Pool: { + encode(message: _85.Pool, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _85.Pool; + fromPartial(object: { + notBondedTokens?: string; + bondedTokens?: string; + }): _85.Pool; + }; + QueryValidatorsRequest: { + encode(message: _84.QueryValidatorsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorsRequest; + fromPartial(object: { + status?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryValidatorsRequest; + }; + QueryValidatorsResponse: { + encode(message: _84.QueryValidatorsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorsResponse; + fromPartial(object: { + validators?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryValidatorsResponse; + }; + QueryValidatorRequest: { + encode(message: _84.QueryValidatorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorRequest; + fromPartial(object: { + validatorAddr?: string; + }): _84.QueryValidatorRequest; + }; + QueryValidatorResponse: { + encode(message: _84.QueryValidatorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorResponse; + fromPartial(object: { + validator?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }; + }): _84.QueryValidatorResponse; + }; + QueryValidatorDelegationsRequest: { + encode(message: _84.QueryValidatorDelegationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorDelegationsRequest; + fromPartial(object: { + validatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | 
import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryValidatorDelegationsRequest; + }; + QueryValidatorDelegationsResponse: { + encode(message: _84.QueryValidatorDelegationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorDelegationsResponse; + fromPartial(object: { + delegationResponses?: { + delegation?: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }; + balance?: { + denom?: string; + amount?: string; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryValidatorDelegationsResponse; + }; + QueryValidatorUnbondingDelegationsRequest: { + encode(message: _84.QueryValidatorUnbondingDelegationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorUnbondingDelegationsRequest; + fromPartial(object: { + validatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryValidatorUnbondingDelegationsRequest; + }; + QueryValidatorUnbondingDelegationsResponse: { + encode(message: _84.QueryValidatorUnbondingDelegationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryValidatorUnbondingDelegationsResponse; + fromPartial(object: { + unbondingResponses?: { + delegatorAddress?: string; + validatorAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + balance?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryValidatorUnbondingDelegationsResponse; + }; + QueryDelegationRequest: { + encode(message: _84.QueryDelegationRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegationRequest; + fromPartial(object: { + delegatorAddr?: string; + validatorAddr?: string; + }): _84.QueryDelegationRequest; + }; + QueryDelegationResponse: { + encode(message: _84.QueryDelegationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegationResponse; + fromPartial(object: { + delegationResponse?: { + delegation?: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }; + balance?: { + denom?: string; + amount?: string; + }; + }; + }): _84.QueryDelegationResponse; + }; + QueryUnbondingDelegationRequest: { + encode(message: _84.QueryUnbondingDelegationRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryUnbondingDelegationRequest; + fromPartial(object: { + delegatorAddr?: string; + validatorAddr?: string; + }): _84.QueryUnbondingDelegationRequest; + }; + QueryUnbondingDelegationResponse: { + encode(message: _84.QueryUnbondingDelegationResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + 
decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryUnbondingDelegationResponse; + fromPartial(object: { + unbond?: { + delegatorAddress?: string; + validatorAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + balance?: string; + }[]; + }; + }): _84.QueryUnbondingDelegationResponse; + }; + QueryDelegatorDelegationsRequest: { + encode(message: _84.QueryDelegatorDelegationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorDelegationsRequest; + fromPartial(object: { + delegatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryDelegatorDelegationsRequest; + }; + QueryDelegatorDelegationsResponse: { + encode(message: _84.QueryDelegatorDelegationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorDelegationsResponse; + fromPartial(object: { + delegationResponses?: { + delegation?: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }; + balance?: { + denom?: string; + amount?: string; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryDelegatorDelegationsResponse; + }; + QueryDelegatorUnbondingDelegationsRequest: { + encode(message: _84.QueryDelegatorUnbondingDelegationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorUnbondingDelegationsRequest; + fromPartial(object: { + delegatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryDelegatorUnbondingDelegationsRequest; + }; + QueryDelegatorUnbondingDelegationsResponse: { + encode(message: _84.QueryDelegatorUnbondingDelegationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorUnbondingDelegationsResponse; + fromPartial(object: { + unbondingResponses?: { + delegatorAddress?: string; + validatorAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + balance?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryDelegatorUnbondingDelegationsResponse; + }; + QueryRedelegationsRequest: { + encode(message: _84.QueryRedelegationsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryRedelegationsRequest; + fromPartial(object: { + delegatorAddr?: string; + srcValidatorAddr?: string; + dstValidatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryRedelegationsRequest; + }; + QueryRedelegationsResponse: { 
+ encode(message: _84.QueryRedelegationsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryRedelegationsResponse; + fromPartial(object: { + redelegationResponses?: { + redelegation?: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }[]; + }; + entries?: { + redelegationEntry?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }; + balance?: string; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryRedelegationsResponse; + }; + QueryDelegatorValidatorsRequest: { + encode(message: _84.QueryDelegatorValidatorsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorValidatorsRequest; + fromPartial(object: { + delegatorAddr?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _84.QueryDelegatorValidatorsRequest; + }; + QueryDelegatorValidatorsResponse: { + encode(message: _84.QueryDelegatorValidatorsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorValidatorsResponse; + fromPartial(object: { + validators?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _84.QueryDelegatorValidatorsResponse; + }; + QueryDelegatorValidatorRequest: { + encode(message: _84.QueryDelegatorValidatorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorValidatorRequest; + fromPartial(object: { + delegatorAddr?: string; + validatorAddr?: string; + }): _84.QueryDelegatorValidatorRequest; + }; + QueryDelegatorValidatorResponse: { + encode(message: _84.QueryDelegatorValidatorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryDelegatorValidatorResponse; + fromPartial(object: { + validator?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number 
| import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }; + }): _84.QueryDelegatorValidatorResponse; + }; + QueryHistoricalInfoRequest: { + encode(message: _84.QueryHistoricalInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryHistoricalInfoRequest; + fromPartial(object: { + height?: string | number | import("long").Long; + }): _84.QueryHistoricalInfoRequest; + }; + QueryHistoricalInfoResponse: { + encode(message: _84.QueryHistoricalInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryHistoricalInfoResponse; + fromPartial(object: { + hist?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + valset?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }[]; + }; + }): _84.QueryHistoricalInfoResponse; + }; + QueryPoolRequest: { + encode(_: _84.QueryPoolRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryPoolRequest; + fromPartial(_: {}): _84.QueryPoolRequest; + }; + QueryPoolResponse: { + encode(message: _84.QueryPoolResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryPoolResponse; + fromPartial(object: { + pool?: { + notBondedTokens?: string; + bondedTokens?: string; + }; + }): _84.QueryPoolResponse; + }; + QueryParamsRequest: { + encode(_: _84.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryParamsRequest; + fromPartial(_: {}): _84.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _84.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _84.QueryParamsResponse; + fromPartial(object: { + params?: { + unbondingTime?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxValidators?: number; + maxEntries?: number; + historicalEntries?: number; + bondDenom?: string; + 
minCommissionRate?: string; + }; + }): _84.QueryParamsResponse; + }; + GenesisState: { + encode(message: _83.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _83.GenesisState; + fromPartial(object: { + params?: { + unbondingTime?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxValidators?: number; + maxEntries?: number; + historicalEntries?: number; + bondDenom?: string; + minCommissionRate?: string; + }; + lastTotalPower?: Uint8Array; + lastValidatorPowers?: { + address?: string; + power?: string | number | import("long").Long; + }[]; + validators?: { + operatorAddress?: string; + consensusPubkey?: { + typeUrl?: string; + value?: Uint8Array; + }; + jailed?: boolean; + status?: _85.BondStatus; + tokens?: string; + delegatorShares?: string; + description?: { + moniker?: string; + identity?: string; + website?: string; + securityContact?: string; + details?: string; + }; + unbondingHeight?: string | number | import("long").Long; + unbondingTime?: Date; + commission?: { + commissionRates?: { + rate?: string; + maxRate?: string; + maxChangeRate?: string; + }; + updateTime?: Date; + }; + minSelfDelegation?: string; + }[]; + delegations?: { + delegatorAddress?: string; + validatorAddress?: string; + shares?: string; + }[]; + unbondingDelegations?: { + delegatorAddress?: string; + validatorAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + balance?: string; + }[]; + }[]; + redelegations?: { + delegatorAddress?: string; + validatorSrcAddress?: string; + validatorDstAddress?: string; + entries?: { + creationHeight?: string | number | import("long").Long; + completionTime?: Date; + initialBalance?: string; + sharesDst?: string; + }[]; + }[]; + exported?: boolean; + }): _83.GenesisState; + }; + LastValidatorPower: { + encode(message: _83.LastValidatorPower, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _83.LastValidatorPower; + fromPartial(object: { + address?: string; + power?: string | number | import("long").Long; + }): _83.LastValidatorPower; + }; + authorizationTypeFromJSON(object: any): _82.AuthorizationType; + authorizationTypeToJSON(object: _82.AuthorizationType): string; + AuthorizationType: typeof _82.AuthorizationType; + AuthorizationTypeSDKType: typeof _82.AuthorizationType; + StakeAuthorization: { + encode(message: _82.StakeAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _82.StakeAuthorization; + fromPartial(object: { + maxTokens?: { + denom?: string; + amount?: string; + }; + allowList?: { + address?: string[]; + }; + denyList?: { + address?: string[]; + }; + authorizationType?: _82.AuthorizationType; + }): _82.StakeAuthorization; + }; + StakeAuthorization_Validators: { + encode(message: _82.StakeAuthorization_Validators, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _82.StakeAuthorization_Validators; + fromPartial(object: { + address?: string[]; + }): _82.StakeAuthorization_Validators; + }; + }; + } + namespace tx { + namespace signing { + const v1beta1: { + signModeFromJSON(object: any): _87.SignMode; + signModeToJSON(object: _87.SignMode): string; + 
SignMode: typeof _87.SignMode; + SignModeSDKType: typeof _87.SignMode; + SignatureDescriptors: { + encode(message: _87.SignatureDescriptors, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _87.SignatureDescriptors; + fromPartial(object: { + signatures?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + data?: { + single?: { + mode?: _87.SignMode; + signature?: Uint8Array; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + signatures?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + }): _87.SignatureDescriptors; + }; + SignatureDescriptor: { + encode(message: _87.SignatureDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _87.SignatureDescriptor; + fromPartial(object: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + data?: { + single?: { + mode?: _87.SignMode; + signature?: Uint8Array; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + signatures?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }): _87.SignatureDescriptor; + }; + SignatureDescriptor_Data: { + encode(message: _87.SignatureDescriptor_Data, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _87.SignatureDescriptor_Data; + fromPartial(object: { + single?: { + mode?: _87.SignMode; + signature?: Uint8Array; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + signatures?: any[]; + }; + }): _87.SignatureDescriptor_Data; + }; + SignatureDescriptor_Data_Single: { + encode(message: _87.SignatureDescriptor_Data_Single, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _87.SignatureDescriptor_Data_Single; + fromPartial(object: { + mode?: _87.SignMode; + signature?: Uint8Array; + }): _87.SignatureDescriptor_Data_Single; + }; + SignatureDescriptor_Data_Multi: { + encode(message: _87.SignatureDescriptor_Data_Multi, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _87.SignatureDescriptor_Data_Multi; + fromPartial(object: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + signatures?: any[]; + }): _87.SignatureDescriptor_Data_Multi; + }; + }; + } + const v1beta1: { + ServiceClientImpl: typeof _178.ServiceClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + simulate(request: _88.SimulateRequest): Promise<_88.SimulateResponse>; + getTx(request: _88.GetTxRequest): Promise<_88.GetTxResponse>; + broadcastTx(request: _88.BroadcastTxRequest): Promise<_88.BroadcastTxResponse>; + getTxsEvent(request: _88.GetTxsEventRequest): Promise<_88.GetTxsEventResponse>; + getBlockWithTxs(request: _88.GetBlockWithTxsRequest): Promise<_88.GetBlockWithTxsResponse>; + }; + LCDQueryClient: typeof _160.LCDQueryClient; + Tx: { + encode(message: _89.Tx, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.Tx; + fromPartial(object: { + body?: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: 
string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + authInfo?: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + signatures?: Uint8Array[]; + }): _89.Tx; + }; + TxRaw: { + encode(message: _89.TxRaw, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.TxRaw; + fromPartial(object: { + bodyBytes?: Uint8Array; + authInfoBytes?: Uint8Array; + signatures?: Uint8Array[]; + }): _89.TxRaw; + }; + SignDoc: { + encode(message: _89.SignDoc, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.SignDoc; + fromPartial(object: { + bodyBytes?: Uint8Array; + authInfoBytes?: Uint8Array; + chainId?: string; + accountNumber?: string | number | import("long").Long; + }): _89.SignDoc; + }; + SignDocDirectAux: { + encode(message: _89.SignDocDirectAux, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.SignDocDirectAux; + fromPartial(object: { + bodyBytes?: Uint8Array; + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + chainId?: string; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }): _89.SignDocDirectAux; + }; + TxBody: { + encode(message: _89.TxBody, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.TxBody; + fromPartial(object: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }): _89.TxBody; + }; + AuthInfo: { + encode(message: _89.AuthInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.AuthInfo; + fromPartial(object: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }): _89.AuthInfo; + }; + SignerInfo: { + encode(message: _89.SignerInfo, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.SignerInfo; + fromPartial(object: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }): _89.SignerInfo; + }; + ModeInfo: { + encode(message: _89.ModeInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.ModeInfo; + fromPartial(object: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }): _89.ModeInfo; + }; + ModeInfo_Single: { + encode(message: _89.ModeInfo_Single, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.ModeInfo_Single; + fromPartial(object: { + mode?: _87.SignMode; + }): _89.ModeInfo_Single; + }; + ModeInfo_Multi: { + encode(message: _89.ModeInfo_Multi, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.ModeInfo_Multi; + fromPartial(object: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }): _89.ModeInfo_Multi; + }; + Fee: { + encode(message: _89.Fee, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.Fee; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }): _89.Fee; + }; + Tip: { + encode(message: _89.Tip, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.Tip; + fromPartial(object: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }): _89.Tip; + }; + AuxSignerData: { + encode(message: _89.AuxSignerData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _89.AuxSignerData; + fromPartial(object: { + address?: string; + signDoc?: { + bodyBytes?: Uint8Array; + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + chainId?: string; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + mode?: _87.SignMode; + sig?: Uint8Array; + }): _89.AuxSignerData; + }; + orderByFromJSON(object: any): _88.OrderBy; + orderByToJSON(object: _88.OrderBy): string; + broadcastModeFromJSON(object: any): _88.BroadcastMode; + broadcastModeToJSON(object: _88.BroadcastMode): string; + OrderBy: typeof _88.OrderBy; + OrderBySDKType: typeof _88.OrderBy; + BroadcastMode: typeof _88.BroadcastMode; + BroadcastModeSDKType: typeof _88.BroadcastMode; + GetTxsEventRequest: { + encode(message: _88.GetTxsEventRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetTxsEventRequest; + fromPartial(object: { + events?: 
string[]; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + orderBy?: _88.OrderBy; + }): _88.GetTxsEventRequest; + }; + GetTxsEventResponse: { + encode(message: _88.GetTxsEventResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetTxsEventResponse; + fromPartial(object: { + txs?: { + body?: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + authInfo?: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + signatures?: Uint8Array[]; + }[]; + txResponses?: { + height?: string | number | import("long").Long; + txhash?: string; + codespace?: string; + code?: number; + data?: string; + rawLog?: string; + logs?: { + msgIndex?: number; + log?: string; + events?: { + type?: string; + attributes?: { + key?: string; + value?: string; + }[]; + }[]; + }[]; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + tx?: { + typeUrl?: string; + value?: Uint8Array; + }; + timestamp?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _88.GetTxsEventResponse; + }; + BroadcastTxRequest: { + encode(message: _88.BroadcastTxRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.BroadcastTxRequest; + fromPartial(object: { + txBytes?: Uint8Array; + mode?: _88.BroadcastMode; + }): _88.BroadcastTxRequest; + }; + BroadcastTxResponse: { + encode(message: _88.BroadcastTxResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.BroadcastTxResponse; + fromPartial(object: { + txResponse?: { + height?: string | number | import("long").Long; + txhash?: string; + codespace?: string; + code?: number; + data?: string; + rawLog?: string; + logs?: { + msgIndex?: number; + log?: string; + events?: { + type?: string; + attributes?: { + key?: string; + value?: string; + }[]; + }[]; + }[]; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + tx?: { + typeUrl?: string; + value?: Uint8Array; + }; + timestamp?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }; + }): _88.BroadcastTxResponse; + }; + SimulateRequest: { + encode(message: 
_88.SimulateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.SimulateRequest; + fromPartial(object: { + tx?: { + body?: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + authInfo?: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + signatures?: Uint8Array[]; + }; + txBytes?: Uint8Array; + }): _88.SimulateRequest; + }; + SimulateResponse: { + encode(message: _88.SimulateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.SimulateResponse; + fromPartial(object: { + gasInfo?: { + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + }; + result?: { + data?: Uint8Array; + log?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + msgResponses?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + }): _88.SimulateResponse; + }; + GetTxRequest: { + encode(message: _88.GetTxRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetTxRequest; + fromPartial(object: { + hash?: string; + }): _88.GetTxRequest; + }; + GetTxResponse: { + encode(message: _88.GetTxResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetTxResponse; + fromPartial(object: { + tx?: { + body?: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + authInfo?: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + signatures?: Uint8Array[]; + }; + txResponse?: { + height?: string | number | import("long").Long; + txhash?: string; + codespace?: string; + code?: number; + data?: string; + rawLog?: string; + logs?: { + msgIndex?: number; + log?: string; + events?: { + type?: string; 
+ attributes?: { + key?: string; + value?: string; + }[]; + }[]; + }[]; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + tx?: { + typeUrl?: string; + value?: Uint8Array; + }; + timestamp?: string; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }; + }): _88.GetTxResponse; + }; + GetBlockWithTxsRequest: { + encode(message: _88.GetBlockWithTxsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetBlockWithTxsRequest; + fromPartial(object: { + height?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _88.GetBlockWithTxsRequest; + }; + GetBlockWithTxsResponse: { + encode(message: _88.GetBlockWithTxsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _88.GetBlockWithTxsResponse; + fromPartial(object: { + txs?: { + body?: { + messages?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + memo?: string; + timeoutHeight?: string | number | import("long").Long; + extensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + nonCriticalExtensionOptions?: { + typeUrl?: string; + value?: Uint8Array; + }[]; + }; + authInfo?: { + signerInfos?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + modeInfo?: { + single?: { + mode?: _87.SignMode; + }; + multi?: { + bitarray?: { + extraBitsStored?: number; + elems?: Uint8Array; + }; + modeInfos?: any[]; + }; + }; + sequence?: string | number | import("long").Long; + }[]; + fee?: { + amount?: { + denom?: string; + amount?: string; + }[]; + gasLimit?: string | number | import("long").Long; + payer?: string; + granter?: string; + }; + tip?: { + amount?: { + denom?: string; + amount?: string; + }[]; + tipper?: string; + }; + }; + signatures?: Uint8Array[]; + }[]; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + block?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + data?: { + txs?: Uint8Array[]; + }; + evidence?: { + evidence?: { + duplicateVoteEvidence?: { + voteA?: { + type?: import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: import("../tendermint/types/types").SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: 
number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }[]; + }; + lastCommit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _88.GetBlockWithTxsResponse; + }; + }; + } + namespace upgrade { + const v1beta1: { + MsgClientImpl: typeof _192.MsgClientImpl; + QueryClientImpl: typeof _179.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + currentPlan(request?: _90.QueryCurrentPlanRequest): Promise<_90.QueryCurrentPlanResponse>; + appliedPlan(request: _90.QueryAppliedPlanRequest): Promise<_90.QueryAppliedPlanResponse>; + upgradedConsensusState(request: _90.QueryUpgradedConsensusStateRequest): Promise<_90.QueryUpgradedConsensusStateResponse>; + moduleVersions(request: _90.QueryModuleVersionsRequest): Promise<_90.QueryModuleVersionsResponse>; + authority(request?: _90.QueryAuthorityRequest): Promise<_90.QueryAuthorityResponse>; + }; + LCDQueryClient: typeof _161.LCDQueryClient; + Plan: { + 
encode(message: _92.Plan, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _92.Plan; + fromPartial(object: { + name?: string; + time?: Date; + height?: string | number | import("long").Long; + info?: string; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _92.Plan; + }; + SoftwareUpgradeProposal: { + encode(message: _92.SoftwareUpgradeProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _92.SoftwareUpgradeProposal; + fromPartial(object: { + title?: string; + description?: string; + plan?: { + name?: string; + time?: Date; + height?: string | number | import("long").Long; + info?: string; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _92.SoftwareUpgradeProposal; + }; + CancelSoftwareUpgradeProposal: { + encode(message: _92.CancelSoftwareUpgradeProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _92.CancelSoftwareUpgradeProposal; + fromPartial(object: { + title?: string; + description?: string; + }): _92.CancelSoftwareUpgradeProposal; + }; + ModuleVersion: { + encode(message: _92.ModuleVersion, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _92.ModuleVersion; + fromPartial(object: { + name?: string; + version?: string | number | import("long").Long; + }): _92.ModuleVersion; + }; + MsgSoftwareUpgrade: { + encode(message: _91.MsgSoftwareUpgrade, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _91.MsgSoftwareUpgrade; + fromPartial(object: { + authority?: string; + plan?: { + name?: string; + time?: Date; + height?: string | number | import("long").Long; + info?: string; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _91.MsgSoftwareUpgrade; + }; + MsgSoftwareUpgradeResponse: { + encode(_: _91.MsgSoftwareUpgradeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _91.MsgSoftwareUpgradeResponse; + fromPartial(_: {}): _91.MsgSoftwareUpgradeResponse; + }; + MsgCancelUpgrade: { + encode(message: _91.MsgCancelUpgrade, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _91.MsgCancelUpgrade; + fromPartial(object: { + authority?: string; + }): _91.MsgCancelUpgrade; + }; + MsgCancelUpgradeResponse: { + encode(_: _91.MsgCancelUpgradeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _91.MsgCancelUpgradeResponse; + fromPartial(_: {}): _91.MsgCancelUpgradeResponse; + }; + QueryCurrentPlanRequest: { + encode(_: _90.QueryCurrentPlanRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryCurrentPlanRequest; + fromPartial(_: {}): _90.QueryCurrentPlanRequest; + }; + QueryCurrentPlanResponse: { + encode(message: _90.QueryCurrentPlanResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: 
Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryCurrentPlanResponse; + fromPartial(object: { + plan?: { + name?: string; + time?: Date; + height?: string | number | import("long").Long; + info?: string; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _90.QueryCurrentPlanResponse; + }; + QueryAppliedPlanRequest: { + encode(message: _90.QueryAppliedPlanRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryAppliedPlanRequest; + fromPartial(object: { + name?: string; + }): _90.QueryAppliedPlanRequest; + }; + QueryAppliedPlanResponse: { + encode(message: _90.QueryAppliedPlanResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryAppliedPlanResponse; + fromPartial(object: { + height?: string | number | import("long").Long; + }): _90.QueryAppliedPlanResponse; + }; + QueryUpgradedConsensusStateRequest: { + encode(message: _90.QueryUpgradedConsensusStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryUpgradedConsensusStateRequest; + fromPartial(object: { + lastHeight?: string | number | import("long").Long; + }): _90.QueryUpgradedConsensusStateRequest; + }; + QueryUpgradedConsensusStateResponse: { + encode(message: _90.QueryUpgradedConsensusStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryUpgradedConsensusStateResponse; + fromPartial(object: { + upgradedConsensusState?: Uint8Array; + }): _90.QueryUpgradedConsensusStateResponse; + }; + QueryModuleVersionsRequest: { + encode(message: _90.QueryModuleVersionsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryModuleVersionsRequest; + fromPartial(object: { + moduleName?: string; + }): _90.QueryModuleVersionsRequest; + }; + QueryModuleVersionsResponse: { + encode(message: _90.QueryModuleVersionsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryModuleVersionsResponse; + fromPartial(object: { + moduleVersions?: { + name?: string; + version?: string | number | import("long").Long; + }[]; + }): _90.QueryModuleVersionsResponse; + }; + QueryAuthorityRequest: { + encode(_: _90.QueryAuthorityRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryAuthorityRequest; + fromPartial(_: {}): _90.QueryAuthorityRequest; + }; + QueryAuthorityResponse: { + encode(message: _90.QueryAuthorityResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _90.QueryAuthorityResponse; + fromPartial(object: { + address?: string; + }): _90.QueryAuthorityResponse; + }; + }; + } + namespace vesting { + const v1beta1: { + MsgClientImpl: typeof _193.MsgClientImpl; + BaseVestingAccount: { + encode(message: _94.BaseVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_94.BaseVestingAccount; + fromPartial(object: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + originalVesting?: { + denom?: string; + amount?: string; + }[]; + delegatedFree?: { + denom?: string; + amount?: string; + }[]; + delegatedVesting?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + }): _94.BaseVestingAccount; + }; + ContinuousVestingAccount: { + encode(message: _94.ContinuousVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _94.ContinuousVestingAccount; + fromPartial(object: { + baseVestingAccount?: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + originalVesting?: { + denom?: string; + amount?: string; + }[]; + delegatedFree?: { + denom?: string; + amount?: string; + }[]; + delegatedVesting?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + }; + startTime?: string | number | import("long").Long; + }): _94.ContinuousVestingAccount; + }; + DelayedVestingAccount: { + encode(message: _94.DelayedVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _94.DelayedVestingAccount; + fromPartial(object: { + baseVestingAccount?: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + originalVesting?: { + denom?: string; + amount?: string; + }[]; + delegatedFree?: { + denom?: string; + amount?: string; + }[]; + delegatedVesting?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + }; + }): _94.DelayedVestingAccount; + }; + Period: { + encode(message: _94.Period, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _94.Period; + fromPartial(object: { + length?: string | number | import("long").Long; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _94.Period; + }; + PeriodicVestingAccount: { + encode(message: _94.PeriodicVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _94.PeriodicVestingAccount; + fromPartial(object: { + baseVestingAccount?: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + originalVesting?: { + denom?: string; + amount?: string; + }[]; + delegatedFree?: { + denom?: string; + amount?: string; + }[]; + delegatedVesting?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + }; + startTime?: string | number | import("long").Long; + vestingPeriods?: { + length?: string | number | import("long").Long; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + }): _94.PeriodicVestingAccount; + }; + 
PermanentLockedAccount: { + encode(message: _94.PermanentLockedAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _94.PermanentLockedAccount; + fromPartial(object: { + baseVestingAccount?: { + baseAccount?: { + address?: string; + pubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + accountNumber?: string | number | import("long").Long; + sequence?: string | number | import("long").Long; + }; + originalVesting?: { + denom?: string; + amount?: string; + }[]; + delegatedFree?: { + denom?: string; + amount?: string; + }[]; + delegatedVesting?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + }; + }): _94.PermanentLockedAccount; + }; + MsgCreateVestingAccount: { + encode(message: _93.MsgCreateVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreateVestingAccount; + fromPartial(object: { + fromAddress?: string; + toAddress?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + endTime?: string | number | import("long").Long; + delayed?: boolean; + }): _93.MsgCreateVestingAccount; + }; + MsgCreateVestingAccountResponse: { + encode(_: _93.MsgCreateVestingAccountResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreateVestingAccountResponse; + fromPartial(_: {}): _93.MsgCreateVestingAccountResponse; + }; + MsgCreatePermanentLockedAccount: { + encode(message: _93.MsgCreatePermanentLockedAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreatePermanentLockedAccount; + fromPartial(object: { + fromAddress?: string; + toAddress?: string; + amount?: { + denom?: string; + amount?: string; + }[]; + }): _93.MsgCreatePermanentLockedAccount; + }; + MsgCreatePermanentLockedAccountResponse: { + encode(_: _93.MsgCreatePermanentLockedAccountResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreatePermanentLockedAccountResponse; + fromPartial(_: {}): _93.MsgCreatePermanentLockedAccountResponse; + }; + MsgCreatePeriodicVestingAccount: { + encode(message: _93.MsgCreatePeriodicVestingAccount, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreatePeriodicVestingAccount; + fromPartial(object: { + fromAddress?: string; + toAddress?: string; + startTime?: string | number | import("long").Long; + vestingPeriods?: { + length?: string | number | import("long").Long; + amount?: { + denom?: string; + amount?: string; + }[]; + }[]; + }): _93.MsgCreatePeriodicVestingAccount; + }; + MsgCreatePeriodicVestingAccountResponse: { + encode(_: _93.MsgCreatePeriodicVestingAccountResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _93.MsgCreatePeriodicVestingAccountResponse; + fromPartial(_: {}): _93.MsgCreatePeriodicVestingAccountResponse; + }; + }; + } + const ClientFactory: { + createRPCMsgClient: ({ rpc }: { + rpc: import("../helpers").Rpc; + }) => Promise<{ + cosmos: { + authz: { + v1beta1: _180.MsgClientImpl; + }; + bank: 
{ + v1beta1: _181.MsgClientImpl; + }; + crisis: { + v1beta1: _182.MsgClientImpl; + }; + distribution: { + v1beta1: _183.MsgClientImpl; + }; + evidence: { + v1beta1: _184.MsgClientImpl; + }; + feegrant: { + v1beta1: _185.MsgClientImpl; + }; + gov: { + v1: _186.MsgClientImpl; + v1beta1: _187.MsgClientImpl; + }; + group: { + v1: _188.MsgClientImpl; + }; + nft: { + v1beta1: _189.MsgClientImpl; + }; + slashing: { + v1beta1: _190.MsgClientImpl; + }; + staking: { + v1beta1: _191.MsgClientImpl; + }; + upgrade: { + v1beta1: _192.MsgClientImpl; + }; + vesting: { + v1beta1: _193.MsgClientImpl; + }; + }; + }>; + createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | import("@cosmjs/tendermint-rpc").HttpEndpoint; + }) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: _5.QueryConfigRequest): Promise<_5.QueryConfigResponse>; + }; + }; + auth: { + v1beta1: { + accounts(request?: _8.QueryAccountsRequest): Promise<_8.QueryAccountsResponse>; + account(request: _8.QueryAccountRequest): Promise<_8.QueryAccountResponse>; + params(request?: _8.QueryParamsRequest): Promise<_8.QueryParamsResponse>; + moduleAccounts(request?: _8.QueryModuleAccountsRequest): Promise<_8.QueryModuleAccountsResponse>; + bech32Prefix(request?: _8.Bech32PrefixRequest): Promise<_8.Bech32PrefixResponse>; + addressBytesToString(request: _8.AddressBytesToStringRequest): Promise<_8.AddressBytesToStringResponse>; + addressStringToBytes(request: _8.AddressStringToBytesRequest): Promise<_8.AddressStringToBytesResponse>; + }; + }; + authz: { + v1beta1: { + grants(request: _12.QueryGrantsRequest): Promise<_12.QueryGrantsResponse>; + granterGrants(request: _12.QueryGranterGrantsRequest): Promise<_12.QueryGranterGrantsResponse>; + granteeGrants(request: _12.QueryGranteeGrantsRequest): Promise<_12.QueryGranteeGrantsResponse>; + }; + }; + bank: { + v1beta1: { + balance(request: _17.QueryBalanceRequest): Promise<_17.QueryBalanceResponse>; + allBalances(request: _17.QueryAllBalancesRequest): Promise<_17.QueryAllBalancesResponse>; + spendableBalances(request: _17.QuerySpendableBalancesRequest): Promise<_17.QuerySpendableBalancesResponse>; + totalSupply(request?: _17.QueryTotalSupplyRequest): Promise<_17.QueryTotalSupplyResponse>; + supplyOf(request: _17.QuerySupplyOfRequest): Promise<_17.QuerySupplyOfResponse>; + params(request?: _17.QueryParamsRequest): Promise<_17.QueryParamsResponse>; + denomMetadata(request: _17.QueryDenomMetadataRequest): Promise<_17.QueryDenomMetadataResponse>; + denomsMetadata(request?: _17.QueryDenomsMetadataRequest): Promise<_17.QueryDenomsMetadataResponse>; + denomOwners(request: _17.QueryDenomOwnersRequest): Promise<_17.QueryDenomOwnersResponse>; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: _27.GetNodeInfoRequest): Promise<_27.GetNodeInfoResponse>; + getSyncing(request?: _27.GetSyncingRequest): Promise<_27.GetSyncingResponse>; + getLatestBlock(request?: _27.GetLatestBlockRequest): Promise<_27.GetLatestBlockResponse>; + getBlockByHeight(request: _27.GetBlockByHeightRequest): Promise<_27.GetBlockByHeightResponse>; + getLatestValidatorSet(request?: _27.GetLatestValidatorSetRequest): Promise<_27.GetLatestValidatorSetResponse>; + getValidatorSetByHeight(request: _27.GetValidatorSetByHeightRequest): Promise<_27.GetValidatorSetByHeightResponse>; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: _41.QueryParamsRequest): Promise<_41.QueryParamsResponse>; + validatorOutstandingRewards(request: _41.QueryValidatorOutstandingRewardsRequest): 
Promise<_41.QueryValidatorOutstandingRewardsResponse>; + validatorCommission(request: _41.QueryValidatorCommissionRequest): Promise<_41.QueryValidatorCommissionResponse>; + validatorSlashes(request: _41.QueryValidatorSlashesRequest): Promise<_41.QueryValidatorSlashesResponse>; + delegationRewards(request: _41.QueryDelegationRewardsRequest): Promise<_41.QueryDelegationRewardsResponse>; + delegationTotalRewards(request: _41.QueryDelegationTotalRewardsRequest): Promise<_41.QueryDelegationTotalRewardsResponse>; + delegatorValidators(request: _41.QueryDelegatorValidatorsRequest): Promise<_41.QueryDelegatorValidatorsResponse>; + delegatorWithdrawAddress(request: _41.QueryDelegatorWithdrawAddressRequest): Promise<_41.QueryDelegatorWithdrawAddressResponse>; + communityPool(request?: _41.QueryCommunityPoolRequest): Promise<_41.QueryCommunityPoolResponse>; + }; + }; + evidence: { + v1beta1: { + evidence(request: _45.QueryEvidenceRequest): Promise<_45.QueryEvidenceResponse>; + allEvidence(request?: _45.QueryAllEvidenceRequest): Promise<_45.QueryAllEvidenceResponse>; + }; + }; + feegrant: { + v1beta1: { + allowance(request: _49.QueryAllowanceRequest): Promise<_49.QueryAllowanceResponse>; + allowances(request: _49.QueryAllowancesRequest): Promise<_49.QueryAllowancesResponse>; + allowancesByGranter(request: _49.QueryAllowancesByGranterRequest): Promise<_49.QueryAllowancesByGranterResponse>; + }; + }; + gov: { + v1: { + proposal(request: _54.QueryProposalRequest): Promise<_54.QueryProposalResponse>; + proposals(request: _54.QueryProposalsRequest): Promise<_54.QueryProposalsResponse>; + vote(request: _54.QueryVoteRequest): Promise<_54.QueryVoteResponse>; + votes(request: _54.QueryVotesRequest): Promise<_54.QueryVotesResponse>; + params(request: _54.QueryParamsRequest): Promise<_54.QueryParamsResponse>; + deposit(request: _54.QueryDepositRequest): Promise<_54.QueryDepositResponse>; + deposits(request: _54.QueryDepositsRequest): Promise<_54.QueryDepositsResponse>; + tallyResult(request: _54.QueryTallyResultRequest): Promise<_54.QueryTallyResultResponse>; + }; + v1beta1: { + proposal(request: _58.QueryProposalRequest): Promise<_58.QueryProposalResponse>; + proposals(request: _58.QueryProposalsRequest): Promise<_58.QueryProposalsResponse>; + vote(request: _58.QueryVoteRequest): Promise<_58.QueryVoteResponse>; + votes(request: _58.QueryVotesRequest): Promise<_58.QueryVotesResponse>; + params(request: _58.QueryParamsRequest): Promise<_58.QueryParamsResponse>; + deposit(request: _58.QueryDepositRequest): Promise<_58.QueryDepositResponse>; + deposits(request: _58.QueryDepositsRequest): Promise<_58.QueryDepositsResponse>; + tallyResult(request: _58.QueryTallyResultRequest): Promise<_58.QueryTallyResultResponse>; + }; + }; + group: { + v1: { + groupInfo(request: _62.QueryGroupInfoRequest): Promise<_62.QueryGroupInfoResponse>; + groupPolicyInfo(request: _62.QueryGroupPolicyInfoRequest): Promise<_62.QueryGroupPolicyInfoResponse>; + groupMembers(request: _62.QueryGroupMembersRequest): Promise<_62.QueryGroupMembersResponse>; + groupsByAdmin(request: _62.QueryGroupsByAdminRequest): Promise<_62.QueryGroupsByAdminResponse>; + groupPoliciesByGroup(request: _62.QueryGroupPoliciesByGroupRequest): Promise<_62.QueryGroupPoliciesByGroupResponse>; + groupPoliciesByAdmin(request: _62.QueryGroupPoliciesByAdminRequest): Promise<_62.QueryGroupPoliciesByAdminResponse>; + proposal(request: _62.QueryProposalRequest): Promise<_62.QueryProposalResponse>; + proposalsByGroupPolicy(request: _62.QueryProposalsByGroupPolicyRequest): 
Promise<_62.QueryProposalsByGroupPolicyResponse>; + voteByProposalVoter(request: _62.QueryVoteByProposalVoterRequest): Promise<_62.QueryVoteByProposalVoterResponse>; + votesByProposal(request: _62.QueryVotesByProposalRequest): Promise<_62.QueryVotesByProposalResponse>; + votesByVoter(request: _62.QueryVotesByVoterRequest): Promise<_62.QueryVotesByVoterResponse>; + groupsByMember(request: _62.QueryGroupsByMemberRequest): Promise<_62.QueryGroupsByMemberResponse>; + tallyResult(request: _62.QueryTallyResultRequest): Promise<_62.QueryTallyResultResponse>; + }; + }; + mint: { + v1beta1: { + params(request?: _67.QueryParamsRequest): Promise<_67.QueryParamsResponse>; + inflation(request?: _67.QueryInflationRequest): Promise<_67.QueryInflationResponse>; + annualProvisions(request?: _67.QueryAnnualProvisionsRequest): Promise<_67.QueryAnnualProvisionsResponse>; + }; + }; + nft: { + v1beta1: { + balance(request: _72.QueryBalanceRequest): Promise<_72.QueryBalanceResponse>; + owner(request: _72.QueryOwnerRequest): Promise<_72.QueryOwnerResponse>; + supply(request: _72.QuerySupplyRequest): Promise<_72.QuerySupplyResponse>; + nFTs(request: _72.QueryNFTsRequest): Promise<_72.QueryNFTsResponse>; + nFT(request: _72.QueryNFTRequest): Promise<_72.QueryNFTResponse>; + class(request: _72.QueryClassRequest): Promise<_72.QueryClassResponse>; + classes(request?: _72.QueryClassesRequest): Promise<_72.QueryClassesResponse>; + }; + }; + params: { + v1beta1: { + params(request: _77.QueryParamsRequest): Promise<_77.QueryParamsResponse>; + subspaces(request?: _77.QuerySubspacesRequest): Promise<_77.QuerySubspacesResponse>; + }; + }; + slashing: { + v1beta1: { + params(request?: _79.QueryParamsRequest): Promise<_79.QueryParamsResponse>; + signingInfo(request: _79.QuerySigningInfoRequest): Promise<_79.QuerySigningInfoResponse>; + signingInfos(request?: _79.QuerySigningInfosRequest): Promise<_79.QuerySigningInfosResponse>; + }; + }; + staking: { + v1beta1: { + validators(request: _84.QueryValidatorsRequest): Promise<_84.QueryValidatorsResponse>; + validator(request: _84.QueryValidatorRequest): Promise<_84.QueryValidatorResponse>; + validatorDelegations(request: _84.QueryValidatorDelegationsRequest): Promise<_84.QueryValidatorDelegationsResponse>; + validatorUnbondingDelegations(request: _84.QueryValidatorUnbondingDelegationsRequest): Promise<_84.QueryValidatorUnbondingDelegationsResponse>; + delegation(request: _84.QueryDelegationRequest): Promise<_84.QueryDelegationResponse>; + unbondingDelegation(request: _84.QueryUnbondingDelegationRequest): Promise<_84.QueryUnbondingDelegationResponse>; + delegatorDelegations(request: _84.QueryDelegatorDelegationsRequest): Promise<_84.QueryDelegatorDelegationsResponse>; + delegatorUnbondingDelegations(request: _84.QueryDelegatorUnbondingDelegationsRequest): Promise<_84.QueryDelegatorUnbondingDelegationsResponse>; + redelegations(request: _84.QueryRedelegationsRequest): Promise<_84.QueryRedelegationsResponse>; + delegatorValidators(request: _84.QueryDelegatorValidatorsRequest): Promise<_84.QueryDelegatorValidatorsResponse>; + delegatorValidator(request: _84.QueryDelegatorValidatorRequest): Promise<_84.QueryDelegatorValidatorResponse>; + historicalInfo(request: _84.QueryHistoricalInfoRequest): Promise<_84.QueryHistoricalInfoResponse>; + pool(request?: _84.QueryPoolRequest): Promise<_84.QueryPoolResponse>; + params(request?: _84.QueryParamsRequest): Promise<_84.QueryParamsResponse>; + }; + }; + tx: { + v1beta1: { + simulate(request: _88.SimulateRequest): Promise<_88.SimulateResponse>; + 
getTx(request: _88.GetTxRequest): Promise<_88.GetTxResponse>; + broadcastTx(request: _88.BroadcastTxRequest): Promise<_88.BroadcastTxResponse>; + getTxsEvent(request: _88.GetTxsEventRequest): Promise<_88.GetTxsEventResponse>; + getBlockWithTxs(request: _88.GetBlockWithTxsRequest): Promise<_88.GetBlockWithTxsResponse>; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: _90.QueryCurrentPlanRequest): Promise<_90.QueryCurrentPlanResponse>; + appliedPlan(request: _90.QueryAppliedPlanRequest): Promise<_90.QueryAppliedPlanResponse>; + upgradedConsensusState(request: _90.QueryUpgradedConsensusStateRequest): Promise<_90.QueryUpgradedConsensusStateResponse>; + moduleVersions(request: _90.QueryModuleVersionsRequest): Promise<_90.QueryModuleVersionsResponse>; + authority(request?: _90.QueryAuthorityRequest): Promise<_90.QueryAuthorityResponse>; + }; + }; + }; + }>; + createLCDClient: ({ restEndpoint }: { + restEndpoint: string; + }) => Promise<{ + cosmos: { + auth: { + v1beta1: _145.LCDQueryClient; + }; + authz: { + v1beta1: _146.LCDQueryClient; + }; + bank: { + v1beta1: _147.LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: _148.LCDQueryClient; + }; + }; + distribution: { + v1beta1: _149.LCDQueryClient; + }; + evidence: { + v1beta1: _150.LCDQueryClient; + }; + feegrant: { + v1beta1: _151.LCDQueryClient; + }; + gov: { + v1: _152.LCDQueryClient; + v1beta1: _153.LCDQueryClient; + }; + group: { + v1: _154.LCDQueryClient; + }; + mint: { + v1beta1: _155.LCDQueryClient; + }; + nft: { + v1beta1: _156.LCDQueryClient; + }; + params: { + v1beta1: _157.LCDQueryClient; + }; + slashing: { + v1beta1: _158.LCDQueryClient; + }; + staking: { + v1beta1: _159.LCDQueryClient; + }; + tx: { + v1beta1: _160.LCDQueryClient; + }; + upgrade: { + v1beta1: _161.LCDQueryClient; + }; + }; + }>; + }; +} diff --git a/packages/codegen/dist/cosmos/capability/v1beta1/capability.d.ts b/packages/codegen/dist/cosmos/capability/v1beta1/capability.d.ts new file mode 100644 index 00000000..cef4338d --- /dev/null +++ b/packages/codegen/dist/cosmos/capability/v1beta1/capability.d.ts @@ -0,0 +1,62 @@ +/// +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * Capability defines an implementation of an object capability. The index + * provided to a Capability must be globally unique. + */ +export interface Capability { + index: Long; +} +/** + * Capability defines an implementation of an object capability. The index + * provided to a Capability must be globally unique. + */ +export interface CapabilitySDKType { + index: Long; +} +/** + * Owner defines a single capability owner. An owner is defined by the name of + * capability and the module name. + */ +export interface Owner { + module: string; + name: string; +} +/** + * Owner defines a single capability owner. An owner is defined by the name of + * capability and the module name. + */ +export interface OwnerSDKType { + module: string; + name: string; +} +/** + * CapabilityOwners defines a set of owners of a single Capability. The set of + * owners must be unique. + */ +export interface CapabilityOwners { + owners: Owner[]; +} +/** + * CapabilityOwners defines a set of owners of a single Capability. The set of + * owners must be unique. 
+ */ +export interface CapabilityOwnersSDKType { + owners: OwnerSDKType[]; +} +export declare const Capability: { + encode(message: Capability, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Capability; + fromPartial(object: DeepPartial<Capability>): Capability; +}; +export declare const Owner: { + encode(message: Owner, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Owner; + fromPartial(object: DeepPartial<Owner>): Owner; +}; +export declare const CapabilityOwners: { + encode(message: CapabilityOwners, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CapabilityOwners; + fromPartial(object: DeepPartial<CapabilityOwners>): CapabilityOwners; +}; diff --git a/packages/codegen/dist/cosmos/capability/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/capability/v1beta1/genesis.d.ts new file mode 100644 index 00000000..80ed85bb --- /dev/null +++ b/packages/codegen/dist/cosmos/capability/v1beta1/genesis.d.ts @@ -0,0 +1,41 @@ +/// +import { CapabilityOwners, CapabilityOwnersSDKType } from "./capability"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisOwners defines the capability owners with their corresponding index. */ +export interface GenesisOwners { + /** index is the index of the capability owner. */ + index: Long; + /** index_owners are the owners at the given index. */ + indexOwners?: CapabilityOwners; +} +/** GenesisOwners defines the capability owners with their corresponding index. */ +export interface GenesisOwnersSDKType { + index: Long; + index_owners?: CapabilityOwnersSDKType; +} +/** GenesisState defines the capability module's genesis state. */ +export interface GenesisState { + /** index is the capability global index. */ + index: Long; + /** + * owners represents a map from index to owners of the capability index + * index key is string to allow amino marshalling. + */ + owners: GenesisOwners[]; +} +/** GenesisState defines the capability module's genesis state. */ +export interface GenesisStateSDKType { + index: Long; + owners: GenesisOwnersSDKType[]; +} +export declare const GenesisOwners: { + encode(message: GenesisOwners, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisOwners; + fromPartial(object: DeepPartial<GenesisOwners>): GenesisOwners; +}; +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial<GenesisState>): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/crisis/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/crisis/v1beta1/genesis.d.ts new file mode 100644 index 00000000..fd05fea6 --- /dev/null +++ b/packages/codegen/dist/cosmos/crisis/v1beta1/genesis.d.ts @@ -0,0 +1,20 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the crisis module's genesis state. */ +export interface GenesisState { + /** + * constant_fee is the fee used to verify the invariant in the crisis + * module. + */ + constantFee?: Coin; +} +/** GenesisState defines the crisis module's genesis state. 
*/ +export interface GenesisStateSDKType { + constant_fee?: CoinSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/crisis/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/crisis/v1beta1/tx.d.ts new file mode 100644 index 00000000..5da6e1f5 --- /dev/null +++ b/packages/codegen/dist/cosmos/crisis/v1beta1/tx.d.ts @@ -0,0 +1,30 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgVerifyInvariant represents a message to verify a particular invariance. */ +export interface MsgVerifyInvariant { + sender: string; + invariantModuleName: string; + invariantRoute: string; +} +/** MsgVerifyInvariant represents a message to verify a particular invariance. */ +export interface MsgVerifyInvariantSDKType { + sender: string; + invariant_module_name: string; + invariant_route: string; +} +/** MsgVerifyInvariantResponse defines the Msg/VerifyInvariant response type. */ +export interface MsgVerifyInvariantResponse { +} +/** MsgVerifyInvariantResponse defines the Msg/VerifyInvariant response type. */ +export interface MsgVerifyInvariantResponseSDKType { +} +export declare const MsgVerifyInvariant: { + encode(message: MsgVerifyInvariant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVerifyInvariant; + fromPartial(object: DeepPartial): MsgVerifyInvariant; +}; +export declare const MsgVerifyInvariantResponse: { + encode(_: MsgVerifyInvariantResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVerifyInvariantResponse; + fromPartial(_: DeepPartial): MsgVerifyInvariantResponse; +}; diff --git a/packages/codegen/dist/cosmos/crisis/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/crisis/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..bce7eac0 --- /dev/null +++ b/packages/codegen/dist/cosmos/crisis/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,12 @@ +import { Rpc } from "../../../helpers"; +import { MsgVerifyInvariant, MsgVerifyInvariantResponse } from "./tx"; +/** Msg defines the bank Msg service. */ +export interface Msg { + /** VerifyInvariant defines a method to verify a particular invariance. */ + verifyInvariant(request: MsgVerifyInvariant): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + verifyInvariant(request: MsgVerifyInvariant): Promise; +} diff --git a/packages/codegen/dist/cosmos/crypto/ed25519/keys.d.ts b/packages/codegen/dist/cosmos/crypto/ed25519/keys.d.ts new file mode 100644 index 00000000..e48fc896 --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/ed25519/keys.d.ts @@ -0,0 +1,46 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * PubKey is an ed25519 public key for handling Tendermint keys in SDK. + * It's needed for Any serialization and SDK compatibility. + * It must not be used in a non Tendermint key context because it doesn't implement + * ADR-28. Nevertheless, you will like to use ed25519 in app user level + * then you must create a new proto message and follow ADR-28 for Address construction. + */ +export interface PubKey { + key: Uint8Array; +} +/** + * PubKey is an ed25519 public key for handling Tendermint keys in SDK. + * It's needed for Any serialization and SDK compatibility. 
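MsgVerifyInvariant above is a plain three-string message, and MsgClientImpl only needs an Rpc transport to submit it. A construction sketch; the address, module name, and route are illustrative, and per the crisis genesis comment above the check costs the module's constant_fee:

```ts
import { MsgVerifyInvariant } from "./tx";

const verifyMsg = MsgVerifyInvariant.fromPartial({
  sender: "cosmos1...",            // placeholder bech32 address of the fee payer
  invariantModuleName: "bank",     // illustrative module registering the invariant
  invariantRoute: "total-supply",  // illustrative invariant route
});
const verifyBytes = MsgVerifyInvariant.encode(verifyMsg).finish();
```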
+ * It must not be used in a non Tendermint key context because it doesn't implement + * ADR-28. Nevertheless, you will like to use ed25519 in app user level + * then you must create a new proto message and follow ADR-28 for Address construction. + */ +export interface PubKeySDKType { + key: Uint8Array; +} +/** + * Deprecated: PrivKey defines a ed25519 private key. + * NOTE: ed25519 keys must not be used in SDK apps except in a tendermint validator context. + */ +export interface PrivKey { + key: Uint8Array; +} +/** + * Deprecated: PrivKey defines a ed25519 private key. + * NOTE: ed25519 keys must not be used in SDK apps except in a tendermint validator context. + */ +export interface PrivKeySDKType { + key: Uint8Array; +} +export declare const PubKey: { + encode(message: PubKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey; + fromPartial(object: DeepPartial): PubKey; +}; +export declare const PrivKey: { + encode(message: PrivKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey; + fromPartial(object: DeepPartial): PrivKey; +}; diff --git a/packages/codegen/dist/cosmos/crypto/hd/v1/hd.d.ts b/packages/codegen/dist/cosmos/crypto/hd/v1/hd.d.ts new file mode 100644 index 00000000..14fbcc44 --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/hd/v1/hd.d.ts @@ -0,0 +1,31 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** BIP44Params is used as path field in ledger item in Record. */ +export interface BIP44Params { + /** purpose is a constant set to 44' (or 0x8000002C) following the BIP43 recommendation */ + purpose: number; + /** coin_type is a constant that improves privacy */ + coinType: number; + /** account splits the key space into independent user identities */ + account: number; + /** + * change is a constant used for public derivation. Constant 0 is used for external chain and constant 1 for internal + * chain. + */ + change: boolean; + /** address_index is used as child index in BIP32 derivation */ + addressIndex: number; +} +/** BIP44Params is used as path field in ledger item in Record. */ +export interface BIP44ParamsSDKType { + purpose: number; + coin_type: number; + account: number; + change: boolean; + address_index: number; +} +export declare const BIP44Params: { + encode(message: BIP44Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BIP44Params; + fromPartial(object: DeepPartial): BIP44Params; +}; diff --git a/packages/codegen/dist/cosmos/crypto/keyring/v1/record.d.ts b/packages/codegen/dist/cosmos/crypto/keyring/v1/record.d.ts new file mode 100644 index 00000000..3455a60e --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/keyring/v1/record.d.ts @@ -0,0 +1,89 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { BIP44Params, BIP44ParamsSDKType } from "../../hd/v1/hd"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** Record is used for representing a key in the keyring. */ +export interface Record { + /** name represents a name of Record */ + name: string; + /** pub_key represents a public key in any format */ + pubKey?: Any; + /** local stores the public information about a locally stored key */ + local?: Record_Local; + /** ledger stores the public information about a Ledger key */ + ledger?: Record_Ledger; + /** Multi does not store any information. 
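BIP44Params above splits the derivation path into one field per component (note that change is typed as a boolean here, with false meaning the external chain). A sketch of the common Cosmos Hub path m/44'/118'/0'/0/0, assuming a file next to the generated hd module:

```ts
import { BIP44Params } from "./hd";

// m/44'/118'/0'/0/0 — purpose 44', coin type 118' (Cosmos Hub), first account,
// external chain, first address index.
const hdPath = BIP44Params.fromPartial({
  purpose: 44,
  coinType: 118,
  account: 0,
  change: false,
  addressIndex: 0,
});
```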
*/ + multi?: Record_Multi; + /** Offline does not store any information. */ + offline?: Record_Offline; +} +/** Record is used for representing a key in the keyring. */ +export interface RecordSDKType { + name: string; + pub_key?: AnySDKType; + local?: Record_LocalSDKType; + ledger?: Record_LedgerSDKType; + multi?: Record_MultiSDKType; + offline?: Record_OfflineSDKType; +} +/** + * Item is a keyring item stored in a keyring backend. + * Local item + */ +export interface Record_Local { + privKey?: Any; + privKeyType: string; +} +/** + * Item is a keyring item stored in a keyring backend. + * Local item + */ +export interface Record_LocalSDKType { + priv_key?: AnySDKType; + priv_key_type: string; +} +/** Ledger item */ +export interface Record_Ledger { + path?: BIP44Params; +} +/** Ledger item */ +export interface Record_LedgerSDKType { + path?: BIP44ParamsSDKType; +} +/** Multi item */ +export interface Record_Multi { +} +/** Multi item */ +export interface Record_MultiSDKType { +} +/** Offline item */ +export interface Record_Offline { +} +/** Offline item */ +export interface Record_OfflineSDKType { +} +export declare const Record: { + encode(message: Record, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Record; + fromPartial(object: DeepPartial): Record; +}; +export declare const Record_Local: { + encode(message: Record_Local, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Local; + fromPartial(object: DeepPartial): Record_Local; +}; +export declare const Record_Ledger: { + encode(message: Record_Ledger, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Ledger; + fromPartial(object: DeepPartial): Record_Ledger; +}; +export declare const Record_Multi: { + encode(_: Record_Multi, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Multi; + fromPartial(_: DeepPartial): Record_Multi; +}; +export declare const Record_Offline: { + encode(_: Record_Offline, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Offline; + fromPartial(_: DeepPartial): Record_Offline; +}; diff --git a/packages/codegen/dist/cosmos/crypto/multisig/keys.d.ts b/packages/codegen/dist/cosmos/crypto/multisig/keys.d.ts new file mode 100644 index 00000000..e1ee8b57 --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/multisig/keys.d.ts @@ -0,0 +1,26 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * LegacyAminoPubKey specifies a public key type + * which nests multiple public keys and a threshold, + * it uses legacy amino address rules. + */ +export interface LegacyAminoPubKey { + threshold: number; + publicKeys: Any[]; +} +/** + * LegacyAminoPubKey specifies a public key type + * which nests multiple public keys and a threshold, + * it uses legacy amino address rules. 
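Record above carries one optional item per keyring backend (local, ledger, multi, offline); in the underlying proto these are alternatives, so a typical entry sets only one of them. A sketch of a Ledger-backed entry reusing BIP44Params (the key name is illustrative; imports assume a file next to the generated record module):

```ts
import { Record, Record_Ledger } from "./record";
import { BIP44Params } from "../../hd/v1/hd";

const ledgerEntry = Record.fromPartial({
  name: "my-ledger-key", // illustrative key name
  ledger: Record_Ledger.fromPartial({
    path: BIP44Params.fromPartial({ purpose: 44, coinType: 118, account: 0, change: false, addressIndex: 0 }),
  }),
  // pubKey would normally carry the public key packed as a google.protobuf.Any; omitted here.
});
```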
+ */ +export interface LegacyAminoPubKeySDKType { + threshold: number; + public_keys: AnySDKType[]; +} +export declare const LegacyAminoPubKey: { + encode(message: LegacyAminoPubKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LegacyAminoPubKey; + fromPartial(object: DeepPartial): LegacyAminoPubKey; +}; diff --git a/packages/codegen/dist/cosmos/crypto/multisig/v1beta1/multisig.d.ts b/packages/codegen/dist/cosmos/crypto/multisig/v1beta1/multisig.d.ts new file mode 100644 index 00000000..db981e96 --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/multisig/v1beta1/multisig.d.ts @@ -0,0 +1,48 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. + * See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers + * signed and with which modes. + */ +export interface MultiSignature { + signatures: Uint8Array[]; +} +/** + * MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. + * See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers + * signed and with which modes. + */ +export interface MultiSignatureSDKType { + signatures: Uint8Array[]; +} +/** + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. + */ +export interface CompactBitArray { + extraBitsStored: number; + elems: Uint8Array; +} +/** + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. + */ +export interface CompactBitArraySDKType { + extra_bits_stored: number; + elems: Uint8Array; +} +export declare const MultiSignature: { + encode(message: MultiSignature, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MultiSignature; + fromPartial(object: DeepPartial): MultiSignature; +}; +export declare const CompactBitArray: { + encode(message: CompactBitArray, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CompactBitArray; + fromPartial(object: DeepPartial): CompactBitArray; +}; diff --git a/packages/codegen/dist/cosmos/crypto/secp256k1/keys.d.ts b/packages/codegen/dist/cosmos/crypto/secp256k1/keys.d.ts new file mode 100644 index 00000000..3d87be5a --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/secp256k1/keys.d.ts @@ -0,0 +1,40 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * PubKey defines a secp256k1 public key + * Key is the compressed form of the pubkey. The first byte depends is a 0x02 byte + * if the y-coordinate is the lexicographically largest of the two associated with + * the x-coordinate. Otherwise the first byte is a 0x03. + * This prefix is followed with the x-coordinate. + */ +export interface PubKey { + key: Uint8Array; +} +/** + * PubKey defines a secp256k1 public key + * Key is the compressed form of the pubkey. The first byte depends is a 0x02 byte + * if the y-coordinate is the lexicographically largest of the two associated with + * the x-coordinate. Otherwise the first byte is a 0x03. + * This prefix is followed with the x-coordinate. 
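MultiSignature above stores only the raw signatures, in signer order, while CompactBitArray records which members of the LegacyAminoPubKey actually signed. A sketch for a threshold key where signers 0 and 2 (of 3) signed, assuming the SDK's most-significant-bit-first packing; all byte values are placeholders:

```ts
import { MultiSignature, CompactBitArray } from "./multisig";

// Placeholder 64-byte signatures from signers 0 and 2.
const sig0 = new Uint8Array(64);
const sig2 = new Uint8Array(64);

// Three meaningful bits packed MSB-first into one byte: 1 0 1 -> signers 0 and 2 signed.
const bitArray = CompactBitArray.fromPartial({
  extraBitsStored: 3,
  elems: new Uint8Array([0b1010_0000]),
});

// Signatures line up, in order, with the set bits above.
const multiSig = MultiSignature.fromPartial({ signatures: [sig0, sig2] });
```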
+ */ +export interface PubKeySDKType { + key: Uint8Array; +} +/** PrivKey defines a secp256k1 private key. */ +export interface PrivKey { + key: Uint8Array; +} +/** PrivKey defines a secp256k1 private key. */ +export interface PrivKeySDKType { + key: Uint8Array; +} +export declare const PubKey: { + encode(message: PubKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey; + fromPartial(object: DeepPartial): PubKey; +}; +export declare const PrivKey: { + encode(message: PrivKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey; + fromPartial(object: DeepPartial): PrivKey; +}; diff --git a/packages/codegen/dist/cosmos/crypto/secp256r1/keys.d.ts b/packages/codegen/dist/cosmos/crypto/secp256r1/keys.d.ts new file mode 100644 index 00000000..9a482187 --- /dev/null +++ b/packages/codegen/dist/cosmos/crypto/secp256r1/keys.d.ts @@ -0,0 +1,33 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** PubKey defines a secp256r1 ECDSA public key. */ +export interface PubKey { + /** + * Point on secp256r1 curve in a compressed representation as specified in section + * 4.3.6 of ANSI X9.62: https://webstore.ansi.org/standards/ascx9/ansix9621998 + */ + key: Uint8Array; +} +/** PubKey defines a secp256r1 ECDSA public key. */ +export interface PubKeySDKType { + key: Uint8Array; +} +/** PrivKey defines a secp256r1 ECDSA private key. */ +export interface PrivKey { + /** secret number serialized using big-endian encoding */ + secret: Uint8Array; +} +/** PrivKey defines a secp256r1 ECDSA private key. */ +export interface PrivKeySDKType { + secret: Uint8Array; +} +export declare const PubKey: { + encode(message: PubKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey; + fromPartial(object: DeepPartial): PubKey; +}; +export declare const PrivKey: { + encode(message: PrivKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey; + fromPartial(object: DeepPartial): PrivKey; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/distribution.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/distribution.d.ts new file mode 100644 index 00000000..b663d6cd --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/distribution.d.ts @@ -0,0 +1,282 @@ +/// +import { DecCoin, DecCoinSDKType, Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Params defines the set of params for the distribution module. */ +export interface Params { + communityTax: string; + baseProposerReward: string; + bonusProposerReward: string; + withdrawAddrEnabled: boolean; +} +/** Params defines the set of params for the distribution module. */ +export interface ParamsSDKType { + community_tax: string; + base_proposer_reward: string; + bonus_proposer_reward: string; + withdraw_addr_enabled: boolean; +} +/** + * ValidatorHistoricalRewards represents historical rewards for a validator. + * Height is implicit within the store key. + * Cumulative reward ratio is the sum from the zeroeth period + * until this period of rewards / tokens, per the spec. + * The reference count indicates the number of objects + * which might need to reference this historical entry at any point. 
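The secp256k1 PubKey above expects the 33-byte compressed encoding described in its comment: a one-byte 0x02/0x03 prefix followed by the 32-byte x-coordinate. A wrapping sketch with placeholder bytes, assuming a file next to the generated secp256k1 keys module:

```ts
import { PubKey } from "./keys";

// Placeholder compressed key: prefix byte + 32-byte x-coordinate.
const compressed = new Uint8Array(33);
compressed[0] = 0x02; // 0x02 or 0x03 depending on the y-coordinate
const secpPubKey = PubKey.fromPartial({ key: compressed });
const secpPubKeyBytes = PubKey.encode(secpPubKey).finish();
```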
+ * ReferenceCount = + * number of outstanding delegations which ended the associated period (and + * might need to read that record) + * + number of slashes which ended the associated period (and might need to + * read that record) + * + one per validator for the zeroeth period, set on initialization + */ +export interface ValidatorHistoricalRewards { + cumulativeRewardRatio: DecCoin[]; + referenceCount: number; +} +/** + * ValidatorHistoricalRewards represents historical rewards for a validator. + * Height is implicit within the store key. + * Cumulative reward ratio is the sum from the zeroeth period + * until this period of rewards / tokens, per the spec. + * The reference count indicates the number of objects + * which might need to reference this historical entry at any point. + * ReferenceCount = + * number of outstanding delegations which ended the associated period (and + * might need to read that record) + * + number of slashes which ended the associated period (and might need to + * read that record) + * + one per validator for the zeroeth period, set on initialization + */ +export interface ValidatorHistoricalRewardsSDKType { + cumulative_reward_ratio: DecCoinSDKType[]; + reference_count: number; +} +/** + * ValidatorCurrentRewards represents current rewards and current + * period for a validator kept as a running counter and incremented + * each block as long as the validator's tokens remain constant. + */ +export interface ValidatorCurrentRewards { + rewards: DecCoin[]; + period: Long; +} +/** + * ValidatorCurrentRewards represents current rewards and current + * period for a validator kept as a running counter and incremented + * each block as long as the validator's tokens remain constant. + */ +export interface ValidatorCurrentRewardsSDKType { + rewards: DecCoinSDKType[]; + period: Long; +} +/** + * ValidatorAccumulatedCommission represents accumulated commission + * for a validator kept as a running counter, can be withdrawn at any time. + */ +export interface ValidatorAccumulatedCommission { + commission: DecCoin[]; +} +/** + * ValidatorAccumulatedCommission represents accumulated commission + * for a validator kept as a running counter, can be withdrawn at any time. + */ +export interface ValidatorAccumulatedCommissionSDKType { + commission: DecCoinSDKType[]; +} +/** + * ValidatorOutstandingRewards represents outstanding (un-withdrawn) rewards + * for a validator inexpensive to track, allows simple sanity checks. + */ +export interface ValidatorOutstandingRewards { + rewards: DecCoin[]; +} +/** + * ValidatorOutstandingRewards represents outstanding (un-withdrawn) rewards + * for a validator inexpensive to track, allows simple sanity checks. + */ +export interface ValidatorOutstandingRewardsSDKType { + rewards: DecCoinSDKType[]; +} +/** + * ValidatorSlashEvent represents a validator slash event. + * Height is implicit within the store key. + * This is needed to calculate appropriate amount of staking tokens + * for delegations which are withdrawn after a slash has occurred. + */ +export interface ValidatorSlashEvent { + validatorPeriod: Long; + fraction: string; +} +/** + * ValidatorSlashEvent represents a validator slash event. + * Height is implicit within the store key. + * This is needed to calculate appropriate amount of staking tokens + * for delegations which are withdrawn after a slash has occurred. 
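Params above keeps its rates as sdk.Dec values rendered as strings. A construction sketch; the percentages are illustrative and not defaults confirmed by this diff:

```ts
import { Params } from "./distribution";

const distributionParams = Params.fromPartial({
  communityTax: "0.020000000000000000",        // 2%, illustrative
  baseProposerReward: "0.010000000000000000",  // 1%, illustrative
  bonusProposerReward: "0.040000000000000000", // 4%, illustrative
  withdrawAddrEnabled: true,
});
```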
+ */ +export interface ValidatorSlashEventSDKType { + validator_period: Long; + fraction: string; +} +/** ValidatorSlashEvents is a collection of ValidatorSlashEvent messages. */ +export interface ValidatorSlashEvents { + validatorSlashEvents: ValidatorSlashEvent[]; +} +/** ValidatorSlashEvents is a collection of ValidatorSlashEvent messages. */ +export interface ValidatorSlashEventsSDKType { + validator_slash_events: ValidatorSlashEventSDKType[]; +} +/** FeePool is the global fee pool for distribution. */ +export interface FeePool { + communityPool: DecCoin[]; +} +/** FeePool is the global fee pool for distribution. */ +export interface FeePoolSDKType { + community_pool: DecCoinSDKType[]; +} +/** + * CommunityPoolSpendProposal details a proposal for use of community funds, + * together with how many coins are proposed to be spent, and to which + * recipient account. + */ +export interface CommunityPoolSpendProposal { + title: string; + description: string; + recipient: string; + amount: Coin[]; +} +/** + * CommunityPoolSpendProposal details a proposal for use of community funds, + * together with how many coins are proposed to be spent, and to which + * recipient account. + */ +export interface CommunityPoolSpendProposalSDKType { + title: string; + description: string; + recipient: string; + amount: CoinSDKType[]; +} +/** + * DelegatorStartingInfo represents the starting info for a delegator reward + * period. It tracks the previous validator period, the delegation's amount of + * staking token, and the creation height (to check later on if any slashes have + * occurred). NOTE: Even though validators are slashed to whole staking tokens, + * the delegators within the validator may be left with less than a full token, + * thus sdk.Dec is used. + */ +export interface DelegatorStartingInfo { + previousPeriod: Long; + stake: string; + height: Long; +} +/** + * DelegatorStartingInfo represents the starting info for a delegator reward + * period. It tracks the previous validator period, the delegation's amount of + * staking token, and the creation height (to check later on if any slashes have + * occurred). NOTE: Even though validators are slashed to whole staking tokens, + * the delegators within the validator may be left with less than a full token, + * thus sdk.Dec is used. + */ +export interface DelegatorStartingInfoSDKType { + previous_period: Long; + stake: string; + height: Long; +} +/** + * DelegationDelegatorReward represents the properties + * of a delegator's delegation reward. + */ +export interface DelegationDelegatorReward { + validatorAddress: string; + reward: DecCoin[]; +} +/** + * DelegationDelegatorReward represents the properties + * of a delegator's delegation reward. 
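DelegatorStartingInfo above mixes Long fields with a stake amount carried as an sdk.Dec string. A construction sketch (all values illustrative; imports assume a file next to the generated distribution module):

```ts
import { Long } from "../../../helpers";
import { DelegatorStartingInfo } from "./distribution";

// One record is kept per delegation, so it can be re-read when rewards are withdrawn.
const startingInfo = DelegatorStartingInfo.fromPartial({
  previousPeriod: Long.fromNumber(7),
  stake: "1000000.000000000000000000", // sdk.Dec rendered as a string (format illustrative)
  height: Long.fromNumber(1234567),
});
```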
+ */ +export interface DelegationDelegatorRewardSDKType { + validator_address: string; + reward: DecCoinSDKType[]; +} +/** + * CommunityPoolSpendProposalWithDeposit defines a CommunityPoolSpendProposal + * with a deposit + */ +export interface CommunityPoolSpendProposalWithDeposit { + title: string; + description: string; + recipient: string; + amount: string; + deposit: string; +} +/** + * CommunityPoolSpendProposalWithDeposit defines a CommunityPoolSpendProposal + * with a deposit + */ +export interface CommunityPoolSpendProposalWithDepositSDKType { + title: string; + description: string; + recipient: string; + amount: string; + deposit: string; +} +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; +export declare const ValidatorHistoricalRewards: { + encode(message: ValidatorHistoricalRewards, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorHistoricalRewards; + fromPartial(object: DeepPartial): ValidatorHistoricalRewards; +}; +export declare const ValidatorCurrentRewards: { + encode(message: ValidatorCurrentRewards, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorCurrentRewards; + fromPartial(object: DeepPartial): ValidatorCurrentRewards; +}; +export declare const ValidatorAccumulatedCommission: { + encode(message: ValidatorAccumulatedCommission, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorAccumulatedCommission; + fromPartial(object: DeepPartial): ValidatorAccumulatedCommission; +}; +export declare const ValidatorOutstandingRewards: { + encode(message: ValidatorOutstandingRewards, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorOutstandingRewards; + fromPartial(object: DeepPartial): ValidatorOutstandingRewards; +}; +export declare const ValidatorSlashEvent: { + encode(message: ValidatorSlashEvent, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEvent; + fromPartial(object: DeepPartial): ValidatorSlashEvent; +}; +export declare const ValidatorSlashEvents: { + encode(message: ValidatorSlashEvents, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEvents; + fromPartial(object: DeepPartial): ValidatorSlashEvents; +}; +export declare const FeePool: { + encode(message: FeePool, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FeePool; + fromPartial(object: DeepPartial): FeePool; +}; +export declare const CommunityPoolSpendProposal: { + encode(message: CommunityPoolSpendProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommunityPoolSpendProposal; + fromPartial(object: DeepPartial): CommunityPoolSpendProposal; +}; +export declare const DelegatorStartingInfo: { + encode(message: DelegatorStartingInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorStartingInfo; + fromPartial(object: DeepPartial): DelegatorStartingInfo; +}; +export declare const DelegationDelegatorReward: { + encode(message: DelegationDelegatorReward, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelegationDelegatorReward; + fromPartial(object: 
DeepPartial): DelegationDelegatorReward; +}; +export declare const CommunityPoolSpendProposalWithDeposit: { + encode(message: CommunityPoolSpendProposalWithDeposit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommunityPoolSpendProposalWithDeposit; + fromPartial(object: DeepPartial): CommunityPoolSpendProposalWithDeposit; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/genesis.d.ts new file mode 100644 index 00000000..1eb2cf0b --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/genesis.d.ts @@ -0,0 +1,197 @@ +/// +import { DecCoin, DecCoinSDKType } from "../../base/v1beta1/coin"; +import { ValidatorAccumulatedCommission, ValidatorAccumulatedCommissionSDKType, ValidatorHistoricalRewards, ValidatorHistoricalRewardsSDKType, ValidatorCurrentRewards, ValidatorCurrentRewardsSDKType, DelegatorStartingInfo, DelegatorStartingInfoSDKType, ValidatorSlashEvent, ValidatorSlashEventSDKType, Params, ParamsSDKType, FeePool, FeePoolSDKType } from "./distribution"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * DelegatorWithdrawInfo is the address for where distributions rewards are + * withdrawn to by default this struct is only used at genesis to feed in + * default withdraw addresses. + */ +export interface DelegatorWithdrawInfo { + /** delegator_address is the address of the delegator. */ + delegatorAddress: string; + /** withdraw_address is the address to withdraw the delegation rewards to. */ + withdrawAddress: string; +} +/** + * DelegatorWithdrawInfo is the address for where distributions rewards are + * withdrawn to by default this struct is only used at genesis to feed in + * default withdraw addresses. + */ +export interface DelegatorWithdrawInfoSDKType { + delegator_address: string; + withdraw_address: string; +} +/** ValidatorOutstandingRewardsRecord is used for import/export via genesis json. */ +export interface ValidatorOutstandingRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** outstanding_rewards represents the oustanding rewards of a validator. */ + outstandingRewards: DecCoin[]; +} +/** ValidatorOutstandingRewardsRecord is used for import/export via genesis json. */ +export interface ValidatorOutstandingRewardsRecordSDKType { + validator_address: string; + outstanding_rewards: DecCoinSDKType[]; +} +/** + * ValidatorAccumulatedCommissionRecord is used for import / export via genesis + * json. + */ +export interface ValidatorAccumulatedCommissionRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** accumulated is the accumulated commission of a validator. */ + accumulated?: ValidatorAccumulatedCommission; +} +/** + * ValidatorAccumulatedCommissionRecord is used for import / export via genesis + * json. + */ +export interface ValidatorAccumulatedCommissionRecordSDKType { + validator_address: string; + accumulated?: ValidatorAccumulatedCommissionSDKType; +} +/** + * ValidatorHistoricalRewardsRecord is used for import / export via genesis + * json. + */ +export interface ValidatorHistoricalRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** period defines the period the historical rewards apply to. */ + period: Long; + /** rewards defines the historical rewards of a validator. 
*/ + rewards?: ValidatorHistoricalRewards; +} +/** + * ValidatorHistoricalRewardsRecord is used for import / export via genesis + * json. + */ +export interface ValidatorHistoricalRewardsRecordSDKType { + validator_address: string; + period: Long; + rewards?: ValidatorHistoricalRewardsSDKType; +} +/** ValidatorCurrentRewardsRecord is used for import / export via genesis json. */ +export interface ValidatorCurrentRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** rewards defines the current rewards of a validator. */ + rewards?: ValidatorCurrentRewards; +} +/** ValidatorCurrentRewardsRecord is used for import / export via genesis json. */ +export interface ValidatorCurrentRewardsRecordSDKType { + validator_address: string; + rewards?: ValidatorCurrentRewardsSDKType; +} +/** DelegatorStartingInfoRecord used for import / export via genesis json. */ +export interface DelegatorStartingInfoRecord { + /** delegator_address is the address of the delegator. */ + delegatorAddress: string; + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** starting_info defines the starting info of a delegator. */ + startingInfo?: DelegatorStartingInfo; +} +/** DelegatorStartingInfoRecord used for import / export via genesis json. */ +export interface DelegatorStartingInfoRecordSDKType { + delegator_address: string; + validator_address: string; + starting_info?: DelegatorStartingInfoSDKType; +} +/** ValidatorSlashEventRecord is used for import / export via genesis json. */ +export interface ValidatorSlashEventRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** height defines the block height at which the slash event occured. */ + height: Long; + /** period is the period of the slash event. */ + period: Long; + /** validator_slash_event describes the slash event. */ + validatorSlashEvent?: ValidatorSlashEvent; +} +/** ValidatorSlashEventRecord is used for import / export via genesis json. */ +export interface ValidatorSlashEventRecordSDKType { + validator_address: string; + height: Long; + period: Long; + validator_slash_event?: ValidatorSlashEventSDKType; +} +/** GenesisState defines the distribution module's genesis state. */ +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** fee_pool defines the fee pool at genesis. */ + feePool?: FeePool; + /** fee_pool defines the delegator withdraw infos at genesis. */ + delegatorWithdrawInfos: DelegatorWithdrawInfo[]; + /** fee_pool defines the previous proposer at genesis. */ + previousProposer: string; + /** fee_pool defines the outstanding rewards of all validators at genesis. */ + outstandingRewards: ValidatorOutstandingRewardsRecord[]; + /** fee_pool defines the accumulated commisions of all validators at genesis. */ + validatorAccumulatedCommissions: ValidatorAccumulatedCommissionRecord[]; + /** fee_pool defines the historical rewards of all validators at genesis. */ + validatorHistoricalRewards: ValidatorHistoricalRewardsRecord[]; + /** fee_pool defines the current rewards of all validators at genesis. */ + validatorCurrentRewards: ValidatorCurrentRewardsRecord[]; + /** fee_pool defines the delegator starting infos at genesis. */ + delegatorStartingInfos: DelegatorStartingInfoRecord[]; + /** fee_pool defines the validator slash events at genesis. 
*/ + validatorSlashEvents: ValidatorSlashEventRecord[]; +} +/** GenesisState defines the distribution module's genesis state. */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + fee_pool?: FeePoolSDKType; + delegator_withdraw_infos: DelegatorWithdrawInfoSDKType[]; + previous_proposer: string; + outstanding_rewards: ValidatorOutstandingRewardsRecordSDKType[]; + validator_accumulated_commissions: ValidatorAccumulatedCommissionRecordSDKType[]; + validator_historical_rewards: ValidatorHistoricalRewardsRecordSDKType[]; + validator_current_rewards: ValidatorCurrentRewardsRecordSDKType[]; + delegator_starting_infos: DelegatorStartingInfoRecordSDKType[]; + validator_slash_events: ValidatorSlashEventRecordSDKType[]; +} +export declare const DelegatorWithdrawInfo: { + encode(message: DelegatorWithdrawInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorWithdrawInfo; + fromPartial(object: DeepPartial): DelegatorWithdrawInfo; +}; +export declare const ValidatorOutstandingRewardsRecord: { + encode(message: ValidatorOutstandingRewardsRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorOutstandingRewardsRecord; + fromPartial(object: DeepPartial): ValidatorOutstandingRewardsRecord; +}; +export declare const ValidatorAccumulatedCommissionRecord: { + encode(message: ValidatorAccumulatedCommissionRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorAccumulatedCommissionRecord; + fromPartial(object: DeepPartial): ValidatorAccumulatedCommissionRecord; +}; +export declare const ValidatorHistoricalRewardsRecord: { + encode(message: ValidatorHistoricalRewardsRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorHistoricalRewardsRecord; + fromPartial(object: DeepPartial): ValidatorHistoricalRewardsRecord; +}; +export declare const ValidatorCurrentRewardsRecord: { + encode(message: ValidatorCurrentRewardsRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorCurrentRewardsRecord; + fromPartial(object: DeepPartial): ValidatorCurrentRewardsRecord; +}; +export declare const DelegatorStartingInfoRecord: { + encode(message: DelegatorStartingInfoRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorStartingInfoRecord; + fromPartial(object: DeepPartial): DelegatorStartingInfoRecord; +}; +export declare const ValidatorSlashEventRecord: { + encode(message: ValidatorSlashEventRecord, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEventRecord; + fromPartial(object: DeepPartial): ValidatorSlashEventRecord; +}; +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/query.d.ts new file mode 100644 index 00000000..ea20f636 --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/query.d.ts @@ -0,0 +1,365 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Params, ParamsSDKType, ValidatorOutstandingRewards, 
ValidatorOutstandingRewardsSDKType, ValidatorAccumulatedCommission, ValidatorAccumulatedCommissionSDKType, ValidatorSlashEvent, ValidatorSlashEventSDKType, DelegationDelegatorReward, DelegationDelegatorRewardSDKType } from "./distribution"; +import { DecCoin, DecCoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryValidatorOutstandingRewardsRequest is the request type for the + * Query/ValidatorOutstandingRewards RPC method. + */ +export interface QueryValidatorOutstandingRewardsRequest { + /** validator_address defines the validator address to query for. */ + validatorAddress: string; +} +/** + * QueryValidatorOutstandingRewardsRequest is the request type for the + * Query/ValidatorOutstandingRewards RPC method. + */ +export interface QueryValidatorOutstandingRewardsRequestSDKType { + validator_address: string; +} +/** + * QueryValidatorOutstandingRewardsResponse is the response type for the + * Query/ValidatorOutstandingRewards RPC method. + */ +export interface QueryValidatorOutstandingRewardsResponse { + rewards?: ValidatorOutstandingRewards; +} +/** + * QueryValidatorOutstandingRewardsResponse is the response type for the + * Query/ValidatorOutstandingRewards RPC method. + */ +export interface QueryValidatorOutstandingRewardsResponseSDKType { + rewards?: ValidatorOutstandingRewardsSDKType; +} +/** + * QueryValidatorCommissionRequest is the request type for the + * Query/ValidatorCommission RPC method + */ +export interface QueryValidatorCommissionRequest { + /** validator_address defines the validator address to query for. */ + validatorAddress: string; +} +/** + * QueryValidatorCommissionRequest is the request type for the + * Query/ValidatorCommission RPC method + */ +export interface QueryValidatorCommissionRequestSDKType { + validator_address: string; +} +/** + * QueryValidatorCommissionResponse is the response type for the + * Query/ValidatorCommission RPC method + */ +export interface QueryValidatorCommissionResponse { + /** commission defines the commision the validator received. */ + commission?: ValidatorAccumulatedCommission; +} +/** + * QueryValidatorCommissionResponse is the response type for the + * Query/ValidatorCommission RPC method + */ +export interface QueryValidatorCommissionResponseSDKType { + commission?: ValidatorAccumulatedCommissionSDKType; +} +/** + * QueryValidatorSlashesRequest is the request type for the + * Query/ValidatorSlashes RPC method + */ +export interface QueryValidatorSlashesRequest { + /** validator_address defines the validator address to query for. */ + validatorAddress: string; + /** starting_height defines the optional starting height to query the slashes. */ + startingHeight: Long; + /** starting_height defines the optional ending height to query the slashes. 
*/ + endingHeight: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryValidatorSlashesRequest is the request type for the + * Query/ValidatorSlashes RPC method + */ +export interface QueryValidatorSlashesRequestSDKType { + validator_address: string; + starting_height: Long; + ending_height: Long; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorSlashesResponse is the response type for the + * Query/ValidatorSlashes RPC method. + */ +export interface QueryValidatorSlashesResponse { + /** slashes defines the slashes the validator received. */ + slashes: ValidatorSlashEvent[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryValidatorSlashesResponse is the response type for the + * Query/ValidatorSlashes RPC method. + */ +export interface QueryValidatorSlashesResponseSDKType { + slashes: ValidatorSlashEventSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegationRewardsRequest is the request type for the + * Query/DelegationRewards RPC method. + */ +export interface QueryDelegationRewardsRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; + /** validator_address defines the validator address to query for. */ + validatorAddress: string; +} +/** + * QueryDelegationRewardsRequest is the request type for the + * Query/DelegationRewards RPC method. + */ +export interface QueryDelegationRewardsRequestSDKType { + delegator_address: string; + validator_address: string; +} +/** + * QueryDelegationRewardsResponse is the response type for the + * Query/DelegationRewards RPC method. + */ +export interface QueryDelegationRewardsResponse { + /** rewards defines the rewards accrued by a delegation. */ + rewards: DecCoin[]; +} +/** + * QueryDelegationRewardsResponse is the response type for the + * Query/DelegationRewards RPC method. + */ +export interface QueryDelegationRewardsResponseSDKType { + rewards: DecCoinSDKType[]; +} +/** + * QueryDelegationTotalRewardsRequest is the request type for the + * Query/DelegationTotalRewards RPC method. + */ +export interface QueryDelegationTotalRewardsRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegationTotalRewardsRequest is the request type for the + * Query/DelegationTotalRewards RPC method. + */ +export interface QueryDelegationTotalRewardsRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegationTotalRewardsResponse is the response type for the + * Query/DelegationTotalRewards RPC method. + */ +export interface QueryDelegationTotalRewardsResponse { + /** rewards defines all the rewards accrued by a delegator. */ + rewards: DelegationDelegatorReward[]; + /** total defines the sum of all the rewards. */ + total: DecCoin[]; +} +/** + * QueryDelegationTotalRewardsResponse is the response type for the + * Query/DelegationTotalRewards RPC method. + */ +export interface QueryDelegationTotalRewardsResponseSDKType { + rewards: DelegationDelegatorRewardSDKType[]; + total: DecCoinSDKType[]; +} +/** + * QueryDelegatorValidatorsRequest is the request type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegatorValidatorsRequest is the request type for the + * Query/DelegatorValidators RPC method. 
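QueryValidatorSlashesRequest above combines a height window with optional pagination. A request sketch; the operator address and heights are placeholders and pagination is left unset:

```ts
import { Long } from "../../../helpers";
import { QueryValidatorSlashesRequest } from "./query";

const slashesRequest = QueryValidatorSlashesRequest.fromPartial({
  validatorAddress: "cosmosvaloper1...", // placeholder operator address
  startingHeight: Long.fromNumber(1),
  endingHeight: Long.fromNumber(2_000_000),
  // pagination could be set to a PageRequest for paged results
});
```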
+ */ +export interface QueryDelegatorValidatorsRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegatorValidatorsResponse is the response type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsResponse { + /** validators defines the validators a delegator is delegating for. */ + validators: string[]; +} +/** + * QueryDelegatorValidatorsResponse is the response type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsResponseSDKType { + validators: string[]; +} +/** + * QueryDelegatorWithdrawAddressRequest is the request type for the + * Query/DelegatorWithdrawAddress RPC method. + */ +export interface QueryDelegatorWithdrawAddressRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegatorWithdrawAddressRequest is the request type for the + * Query/DelegatorWithdrawAddress RPC method. + */ +export interface QueryDelegatorWithdrawAddressRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegatorWithdrawAddressResponse is the response type for the + * Query/DelegatorWithdrawAddress RPC method. + */ +export interface QueryDelegatorWithdrawAddressResponse { + /** withdraw_address defines the delegator address to query for. */ + withdrawAddress: string; +} +/** + * QueryDelegatorWithdrawAddressResponse is the response type for the + * Query/DelegatorWithdrawAddress RPC method. + */ +export interface QueryDelegatorWithdrawAddressResponseSDKType { + withdraw_address: string; +} +/** + * QueryCommunityPoolRequest is the request type for the Query/CommunityPool RPC + * method. + */ +export interface QueryCommunityPoolRequest { +} +/** + * QueryCommunityPoolRequest is the request type for the Query/CommunityPool RPC + * method. + */ +export interface QueryCommunityPoolRequestSDKType { +} +/** + * QueryCommunityPoolResponse is the response type for the Query/CommunityPool + * RPC method. + */ +export interface QueryCommunityPoolResponse { + /** pool defines community pool's coins. */ + pool: DecCoin[]; +} +/** + * QueryCommunityPoolResponse is the response type for the Query/CommunityPool + * RPC method. 
+ */ +export interface QueryCommunityPoolResponseSDKType { + pool: DecCoinSDKType[]; +} +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryValidatorOutstandingRewardsRequest: { + encode(message: QueryValidatorOutstandingRewardsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorOutstandingRewardsRequest; + fromPartial(object: DeepPartial): QueryValidatorOutstandingRewardsRequest; +}; +export declare const QueryValidatorOutstandingRewardsResponse: { + encode(message: QueryValidatorOutstandingRewardsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorOutstandingRewardsResponse; + fromPartial(object: DeepPartial): QueryValidatorOutstandingRewardsResponse; +}; +export declare const QueryValidatorCommissionRequest: { + encode(message: QueryValidatorCommissionRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorCommissionRequest; + fromPartial(object: DeepPartial): QueryValidatorCommissionRequest; +}; +export declare const QueryValidatorCommissionResponse: { + encode(message: QueryValidatorCommissionResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorCommissionResponse; + fromPartial(object: DeepPartial): QueryValidatorCommissionResponse; +}; +export declare const QueryValidatorSlashesRequest: { + encode(message: QueryValidatorSlashesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorSlashesRequest; + fromPartial(object: DeepPartial): QueryValidatorSlashesRequest; +}; +export declare const QueryValidatorSlashesResponse: { + encode(message: QueryValidatorSlashesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorSlashesResponse; + fromPartial(object: DeepPartial): QueryValidatorSlashesResponse; +}; +export declare const QueryDelegationRewardsRequest: { + encode(message: QueryDelegationRewardsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRewardsRequest; + fromPartial(object: DeepPartial): QueryDelegationRewardsRequest; +}; +export declare const QueryDelegationRewardsResponse: { + encode(message: QueryDelegationRewardsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRewardsResponse; + fromPartial(object: DeepPartial): QueryDelegationRewardsResponse; +}; +export declare const QueryDelegationTotalRewardsRequest: { + encode(message: QueryDelegationTotalRewardsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationTotalRewardsRequest; + fromPartial(object: DeepPartial): QueryDelegationTotalRewardsRequest; +}; +export declare const QueryDelegationTotalRewardsResponse: { + encode(message: QueryDelegationTotalRewardsResponse, writer?: _m0.Writer): 
_m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationTotalRewardsResponse; + fromPartial(object: DeepPartial): QueryDelegationTotalRewardsResponse; +}; +export declare const QueryDelegatorValidatorsRequest: { + encode(message: QueryDelegatorValidatorsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsRequest; + fromPartial(object: DeepPartial): QueryDelegatorValidatorsRequest; +}; +export declare const QueryDelegatorValidatorsResponse: { + encode(message: QueryDelegatorValidatorsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsResponse; + fromPartial(object: DeepPartial): QueryDelegatorValidatorsResponse; +}; +export declare const QueryDelegatorWithdrawAddressRequest: { + encode(message: QueryDelegatorWithdrawAddressRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorWithdrawAddressRequest; + fromPartial(object: DeepPartial): QueryDelegatorWithdrawAddressRequest; +}; +export declare const QueryDelegatorWithdrawAddressResponse: { + encode(message: QueryDelegatorWithdrawAddressResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorWithdrawAddressResponse; + fromPartial(object: DeepPartial): QueryDelegatorWithdrawAddressResponse; +}; +export declare const QueryCommunityPoolRequest: { + encode(_: QueryCommunityPoolRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCommunityPoolRequest; + fromPartial(_: DeepPartial): QueryCommunityPoolRequest; +}; +export declare const QueryCommunityPoolResponse: { + encode(message: QueryCommunityPoolResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCommunityPoolResponse; + fromPartial(object: DeepPartial): QueryCommunityPoolResponse; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..1271db0c --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/query.lcd.d.ts @@ -0,0 +1,17 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QueryValidatorOutstandingRewardsRequest, QueryValidatorOutstandingRewardsResponseSDKType, QueryValidatorCommissionRequest, QueryValidatorCommissionResponseSDKType, QueryValidatorSlashesRequest, QueryValidatorSlashesResponseSDKType, QueryDelegationRewardsRequest, QueryDelegationRewardsResponseSDKType, QueryDelegationTotalRewardsRequest, QueryDelegationTotalRewardsResponseSDKType, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponseSDKType, QueryDelegatorWithdrawAddressRequest, QueryDelegatorWithdrawAddressResponseSDKType, QueryCommunityPoolRequest, QueryCommunityPoolResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + params(_params?: QueryParamsRequest): Promise; + validatorOutstandingRewards(params: QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(params: QueryValidatorCommissionRequest): Promise; + validatorSlashes(params: QueryValidatorSlashesRequest): Promise; + delegationRewards(params: QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(params: 
QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(params: QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(params: QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(_params?: QueryCommunityPoolRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..be0d7a15 --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,51 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QueryValidatorOutstandingRewardsRequest, QueryValidatorOutstandingRewardsResponse, QueryValidatorCommissionRequest, QueryValidatorCommissionResponse, QueryValidatorSlashesRequest, QueryValidatorSlashesResponse, QueryDelegationRewardsRequest, QueryDelegationRewardsResponse, QueryDelegationTotalRewardsRequest, QueryDelegationTotalRewardsResponse, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponse, QueryDelegatorWithdrawAddressRequest, QueryDelegatorWithdrawAddressResponse, QueryCommunityPoolRequest, QueryCommunityPoolResponse } from "./query"; +/** Query defines the gRPC querier service for distribution module. */ +export interface Query { + /** Params queries params of the distribution module. */ + params(request?: QueryParamsRequest): Promise; + /** ValidatorOutstandingRewards queries rewards of a validator address. */ + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise; + /** ValidatorCommission queries accumulated commission for a validator. */ + validatorCommission(request: QueryValidatorCommissionRequest): Promise; + /** ValidatorSlashes queries slash events of a validator. */ + validatorSlashes(request: QueryValidatorSlashesRequest): Promise; + /** DelegationRewards queries the total rewards accrued by a delegation. */ + delegationRewards(request: QueryDelegationRewardsRequest): Promise; + /** + * DelegationTotalRewards queries the total rewards accrued by a each + * validator. + */ + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise; + /** DelegatorValidators queries the validators of a delegator. */ + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + /** DelegatorWithdrawAddress queries withdraw address of a delegator. */ + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise; + /** CommunityPool queries the community pool coins. 
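The LCDQueryClient above is constructed from an @osmonauts/lcd LCDClient and maps each distribution query onto a REST call. A usage sketch, assuming LCDClient is constructed with a restEndpoint option as in its documentation; the endpoint and address are placeholders:

```ts
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./query.lcd";

async function fetchDistributionData(): Promise<void> {
  const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
  const queryClient = new LCDQueryClient({ requestClient });

  const pool = await queryClient.communityPool();
  const rewards = await queryClient.delegationTotalRewards({ delegatorAddress: "cosmos1..." });
  console.log(pool, rewards);
}
```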
*/ + communityPool(request?: QueryCommunityPoolRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + params(request?: QueryParamsRequest): Promise; + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: QueryValidatorSlashesRequest): Promise; + delegationRewards(request: QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: QueryCommunityPoolRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + params(request?: QueryParamsRequest): Promise; + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: QueryValidatorSlashesRequest): Promise; + delegationRewards(request: QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: QueryCommunityPoolRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/tx.d.ts new file mode 100644 index 00000000..244496e5 --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/tx.d.ts @@ -0,0 +1,133 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgSetWithdrawAddress sets the withdraw address for + * a delegator (or validator self-delegation). + */ +export interface MsgSetWithdrawAddress { + delegatorAddress: string; + withdrawAddress: string; +} +/** + * MsgSetWithdrawAddress sets the withdraw address for + * a delegator (or validator self-delegation). + */ +export interface MsgSetWithdrawAddressSDKType { + delegator_address: string; + withdraw_address: string; +} +/** MsgSetWithdrawAddressResponse defines the Msg/SetWithdrawAddress response type. */ +export interface MsgSetWithdrawAddressResponse { +} +/** MsgSetWithdrawAddressResponse defines the Msg/SetWithdrawAddress response type. */ +export interface MsgSetWithdrawAddressResponseSDKType { +} +/** + * MsgWithdrawDelegatorReward represents delegation withdrawal to a delegator + * from a single validator. + */ +export interface MsgWithdrawDelegatorReward { + delegatorAddress: string; + validatorAddress: string; +} +/** + * MsgWithdrawDelegatorReward represents delegation withdrawal to a delegator + * from a single validator. + */ +export interface MsgWithdrawDelegatorRewardSDKType { + delegator_address: string; + validator_address: string; +} +/** MsgWithdrawDelegatorRewardResponse defines the Msg/WithdrawDelegatorReward response type. */ +export interface MsgWithdrawDelegatorRewardResponse { + amount: Coin[]; +} +/** MsgWithdrawDelegatorRewardResponse defines the Msg/WithdrawDelegatorReward response type. 
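createRpcQueryExtension above matches the extension-setup shape expected by @cosmjs/stargate's QueryClient. A wiring sketch; the Tendermint34Client usage and RPC endpoint are assumptions about the cosmjs versions this package targets, not something this diff confirms:

```ts
import { QueryClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { createRpcQueryExtension } from "./query.rpc.Query";

async function fetchDelegatorRewards(): Promise<void> {
  const tmClient = await Tendermint34Client.connect("http://localhost:26657"); // placeholder RPC endpoint
  const queryClient = QueryClient.withExtensions(tmClient, createRpcQueryExtension);

  // Methods from the extension are merged directly onto the query client.
  const { rewards, total } = await queryClient.delegationTotalRewards({
    delegatorAddress: "cosmos1...", // placeholder address
  });
  console.log(rewards, total);
}
```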
*/ +export interface MsgWithdrawDelegatorRewardResponseSDKType { + amount: CoinSDKType[]; +} +/** + * MsgWithdrawValidatorCommission withdraws the full commission to the validator + * address. + */ +export interface MsgWithdrawValidatorCommission { + validatorAddress: string; +} +/** + * MsgWithdrawValidatorCommission withdraws the full commission to the validator + * address. + */ +export interface MsgWithdrawValidatorCommissionSDKType { + validator_address: string; +} +/** MsgWithdrawValidatorCommissionResponse defines the Msg/WithdrawValidatorCommission response type. */ +export interface MsgWithdrawValidatorCommissionResponse { + amount: Coin[]; +} +/** MsgWithdrawValidatorCommissionResponse defines the Msg/WithdrawValidatorCommission response type. */ +export interface MsgWithdrawValidatorCommissionResponseSDKType { + amount: CoinSDKType[]; +} +/** + * MsgFundCommunityPool allows an account to directly + * fund the community pool. + */ +export interface MsgFundCommunityPool { + amount: Coin[]; + depositor: string; +} +/** + * MsgFundCommunityPool allows an account to directly + * fund the community pool. + */ +export interface MsgFundCommunityPoolSDKType { + amount: CoinSDKType[]; + depositor: string; +} +/** MsgFundCommunityPoolResponse defines the Msg/FundCommunityPool response type. */ +export interface MsgFundCommunityPoolResponse { +} +/** MsgFundCommunityPoolResponse defines the Msg/FundCommunityPool response type. */ +export interface MsgFundCommunityPoolResponseSDKType { +} +export declare const MsgSetWithdrawAddress: { + encode(message: MsgSetWithdrawAddress, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetWithdrawAddress; + fromPartial(object: DeepPartial): MsgSetWithdrawAddress; +}; +export declare const MsgSetWithdrawAddressResponse: { + encode(_: MsgSetWithdrawAddressResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetWithdrawAddressResponse; + fromPartial(_: DeepPartial): MsgSetWithdrawAddressResponse; +}; +export declare const MsgWithdrawDelegatorReward: { + encode(message: MsgWithdrawDelegatorReward, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawDelegatorReward; + fromPartial(object: DeepPartial): MsgWithdrawDelegatorReward; +}; +export declare const MsgWithdrawDelegatorRewardResponse: { + encode(message: MsgWithdrawDelegatorRewardResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawDelegatorRewardResponse; + fromPartial(object: DeepPartial): MsgWithdrawDelegatorRewardResponse; +}; +export declare const MsgWithdrawValidatorCommission: { + encode(message: MsgWithdrawValidatorCommission, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawValidatorCommission; + fromPartial(object: DeepPartial): MsgWithdrawValidatorCommission; +}; +export declare const MsgWithdrawValidatorCommissionResponse: { + encode(message: MsgWithdrawValidatorCommissionResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawValidatorCommissionResponse; + fromPartial(object: DeepPartial): MsgWithdrawValidatorCommissionResponse; +}; +export declare const MsgFundCommunityPool: { + encode(message: MsgFundCommunityPool, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgFundCommunityPool; + fromPartial(object: DeepPartial): 
MsgFundCommunityPool; +}; +export declare const MsgFundCommunityPoolResponse: { + encode(_: MsgFundCommunityPoolResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgFundCommunityPoolResponse; + fromPartial(_: DeepPartial): MsgFundCommunityPoolResponse; +}; diff --git a/packages/codegen/dist/cosmos/distribution/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/distribution/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..38723e88 --- /dev/null +++ b/packages/codegen/dist/cosmos/distribution/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,33 @@ +import { Rpc } from "../../../helpers"; +import { MsgSetWithdrawAddress, MsgSetWithdrawAddressResponse, MsgWithdrawDelegatorReward, MsgWithdrawDelegatorRewardResponse, MsgWithdrawValidatorCommission, MsgWithdrawValidatorCommissionResponse, MsgFundCommunityPool, MsgFundCommunityPoolResponse } from "./tx"; +/** Msg defines the distribution Msg service. */ +export interface Msg { + /** + * SetWithdrawAddress defines a method to change the withdraw address + * for a delegator (or validator self-delegation). + */ + setWithdrawAddress(request: MsgSetWithdrawAddress): Promise; + /** + * WithdrawDelegatorReward defines a method to withdraw rewards of delegator + * from a single validator. + */ + withdrawDelegatorReward(request: MsgWithdrawDelegatorReward): Promise; + /** + * WithdrawValidatorCommission defines a method to withdraw the + * full commission to the validator address. + */ + withdrawValidatorCommission(request: MsgWithdrawValidatorCommission): Promise; + /** + * FundCommunityPool defines a method to allow an account to directly + * fund the community pool. + */ + fundCommunityPool(request: MsgFundCommunityPool): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + setWithdrawAddress(request: MsgSetWithdrawAddress): Promise; + withdrawDelegatorReward(request: MsgWithdrawDelegatorReward): Promise; + withdrawValidatorCommission(request: MsgWithdrawValidatorCommission): Promise; + fundCommunityPool(request: MsgFundCommunityPool): Promise; +} diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/evidence.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/evidence.d.ts new file mode 100644 index 00000000..68beee4f --- /dev/null +++ b/packages/codegen/dist/cosmos/evidence/v1beta1/evidence.d.ts @@ -0,0 +1,28 @@ +/// +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * Equivocation implements the Evidence interface and defines evidence of double + * signing misbehavior. + */ +export interface Equivocation { + height: Long; + time?: Date; + power: Long; + consensusAddress: string; +} +/** + * Equivocation implements the Evidence interface and defines evidence of double + * signing misbehavior. 
+ */ +export interface EquivocationSDKType { + height: Long; + time?: Date; + power: Long; + consensus_address: string; +} +export declare const Equivocation: { + encode(message: Equivocation, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Equivocation; + fromPartial(object: DeepPartial): Equivocation; +}; diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/genesis.d.ts new file mode 100644 index 00000000..13c22c80 --- /dev/null +++ b/packages/codegen/dist/cosmos/evidence/v1beta1/genesis.d.ts @@ -0,0 +1,17 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the evidence module's genesis state. */ +export interface GenesisState { + /** evidence defines all the evidence at genesis. */ + evidence: Any[]; +} +/** GenesisState defines the evidence module's genesis state. */ +export interface GenesisStateSDKType { + evidence: AnySDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/query.d.ts new file mode 100644 index 00000000..9607b9c2 --- /dev/null +++ b/packages/codegen/dist/cosmos/evidence/v1beta1/query.d.ts @@ -0,0 +1,75 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryEvidenceRequest is the request type for the Query/Evidence RPC method. */ +export interface QueryEvidenceRequest { + /** evidence_hash defines the hash of the requested evidence. */ + evidenceHash: Uint8Array; +} +/** QueryEvidenceRequest is the request type for the Query/Evidence RPC method. */ +export interface QueryEvidenceRequestSDKType { + evidence_hash: Uint8Array; +} +/** QueryEvidenceResponse is the response type for the Query/Evidence RPC method. */ +export interface QueryEvidenceResponse { + /** evidence returns the requested evidence. */ + evidence?: Any; +} +/** QueryEvidenceResponse is the response type for the Query/Evidence RPC method. */ +export interface QueryEvidenceResponseSDKType { + evidence?: AnySDKType; +} +/** + * QueryEvidenceRequest is the request type for the Query/AllEvidence RPC + * method. + */ +export interface QueryAllEvidenceRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryEvidenceRequest is the request type for the Query/AllEvidence RPC + * method. + */ +export interface QueryAllEvidenceRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC + * method. + */ +export interface QueryAllEvidenceResponse { + /** evidence returns all evidences. */ + evidence: Any[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC + * method. 
+ */
+export interface QueryAllEvidenceResponseSDKType {
+ evidence: AnySDKType[];
+ pagination?: PageResponseSDKType;
+}
+export declare const QueryEvidenceRequest: {
+ encode(message: QueryEvidenceRequest, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): QueryEvidenceRequest;
+ fromPartial(object: DeepPartial<QueryEvidenceRequest>): QueryEvidenceRequest;
+};
+export declare const QueryEvidenceResponse: {
+ encode(message: QueryEvidenceResponse, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): QueryEvidenceResponse;
+ fromPartial(object: DeepPartial<QueryEvidenceResponse>): QueryEvidenceResponse;
+};
+export declare const QueryAllEvidenceRequest: {
+ encode(message: QueryAllEvidenceRequest, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllEvidenceRequest;
+ fromPartial(object: DeepPartial<QueryAllEvidenceRequest>): QueryAllEvidenceRequest;
+};
+export declare const QueryAllEvidenceResponse: {
+ encode(message: QueryAllEvidenceResponse, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllEvidenceResponse;
+ fromPartial(object: DeepPartial<QueryAllEvidenceResponse>): QueryAllEvidenceResponse;
+};
diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/query.lcd.d.ts
new file mode 100644
index 00000000..7bca5ded
--- /dev/null
+++ b/packages/codegen/dist/cosmos/evidence/v1beta1/query.lcd.d.ts
@@ -0,0 +1,10 @@
+import { LCDClient } from "@osmonauts/lcd";
+import { QueryEvidenceRequest, QueryEvidenceResponseSDKType, QueryAllEvidenceRequest, QueryAllEvidenceResponseSDKType } from "./query";
+export declare class LCDQueryClient {
+ req: LCDClient;
+ constructor({ requestClient }: {
+ requestClient: LCDClient;
+ });
+ evidence(params: QueryEvidenceRequest): Promise<QueryEvidenceResponseSDKType>;
+ allEvidence(params?: QueryAllEvidenceRequest): Promise<QueryAllEvidenceResponseSDKType>;
+}
diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/query.rpc.Query.d.ts
new file mode 100644
index 00000000..190f6355
--- /dev/null
+++ b/packages/codegen/dist/cosmos/evidence/v1beta1/query.rpc.Query.d.ts
@@ -0,0 +1,20 @@
+import { Rpc } from "../../../helpers";
+import { QueryClient } from "@cosmjs/stargate";
+import { QueryEvidenceRequest, QueryEvidenceResponse, QueryAllEvidenceRequest, QueryAllEvidenceResponse } from "./query";
+/** Query defines the gRPC querier service. */
+export interface Query {
+ /** Evidence queries evidence based on evidence hash. */
+ evidence(request: QueryEvidenceRequest): Promise<QueryEvidenceResponse>;
+ /** AllEvidence queries all evidence. */
+ allEvidence(request?: QueryAllEvidenceRequest): Promise<QueryAllEvidenceResponse>;
+}
+export declare class QueryClientImpl implements Query {
+ private readonly rpc;
+ constructor(rpc: Rpc);
+ evidence(request: QueryEvidenceRequest): Promise<QueryEvidenceResponse>;
+ allEvidence(request?: QueryAllEvidenceRequest): Promise<QueryAllEvidenceResponse>;
+}
+export declare const createRpcQueryExtension: (base: QueryClient) => {
+ evidence(request: QueryEvidenceRequest): Promise<QueryEvidenceResponse>;
+ allEvidence(request?: QueryAllEvidenceRequest): Promise<QueryAllEvidenceResponse>;
+};
diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/tx.d.ts
new file mode 100644
index 00000000..326ec2d2
--- /dev/null
+++ b/packages/codegen/dist/cosmos/evidence/v1beta1/tx.d.ts
@@ -0,0 +1,38 @@
+import { Any, AnySDKType } from "../../../google/protobuf/any";
+import * as _m0 from "protobufjs/minimal";
+import { DeepPartial } from "../../../helpers";
+/**
+ * MsgSubmitEvidence represents a message that supports submitting arbitrary
+ * Evidence of misbehavior such as equivocation or counterfactual signing.
+ */
+export interface MsgSubmitEvidence {
+ submitter: string;
+ evidence?: Any;
+}
+/**
+ * MsgSubmitEvidence represents a message that supports submitting arbitrary
+ * Evidence of misbehavior such as equivocation or counterfactual signing.
+ */
+export interface MsgSubmitEvidenceSDKType {
+ submitter: string;
+ evidence?: AnySDKType;
+}
+/** MsgSubmitEvidenceResponse defines the Msg/SubmitEvidence response type. */
+export interface MsgSubmitEvidenceResponse {
+ /** hash defines the hash of the evidence. */
+ hash: Uint8Array;
+}
+/** MsgSubmitEvidenceResponse defines the Msg/SubmitEvidence response type. */
+export interface MsgSubmitEvidenceResponseSDKType {
+ hash: Uint8Array;
+}
+export declare const MsgSubmitEvidence: {
+ encode(message: MsgSubmitEvidence, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitEvidence;
+ fromPartial(object: DeepPartial<MsgSubmitEvidence>): MsgSubmitEvidence;
+};
+export declare const MsgSubmitEvidenceResponse: {
+ encode(message: MsgSubmitEvidenceResponse, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitEvidenceResponse;
+ fromPartial(object: DeepPartial<MsgSubmitEvidenceResponse>): MsgSubmitEvidenceResponse;
+};
diff --git a/packages/codegen/dist/cosmos/evidence/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/evidence/v1beta1/tx.rpc.msg.d.ts
new file mode 100644
index 00000000..5eeace2c
--- /dev/null
+++ b/packages/codegen/dist/cosmos/evidence/v1beta1/tx.rpc.msg.d.ts
@@ -0,0 +1,15 @@
+import { Rpc } from "../../../helpers";
+import { MsgSubmitEvidence, MsgSubmitEvidenceResponse } from "./tx";
+/** Msg defines the evidence Msg service. */
+export interface Msg {
+ /**
+ * SubmitEvidence submits an arbitrary Evidence of misbehavior such as equivocation or
+ * counterfactual signing.
+ */ + submitEvidence(request: MsgSubmitEvidence): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + submitEvidence(request: MsgSubmitEvidence): Promise; +} diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/feegrant.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/feegrant.d.ts new file mode 100644 index 00000000..a3333d94 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/feegrant.d.ts @@ -0,0 +1,111 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * BasicAllowance implements Allowance with a one-time grant of tokens + * that optionally expires. The grantee can use up to SpendLimit to cover fees. + */ +export interface BasicAllowance { + /** + * spend_limit specifies the maximum amount of tokens that can be spent + * by this allowance and will be updated as tokens are spent. If it is + * empty, there is no spend limit and any amount of coins can be spent. + */ + spendLimit: Coin[]; + /** expiration specifies an optional time when this allowance expires */ + expiration?: Date; +} +/** + * BasicAllowance implements Allowance with a one-time grant of tokens + * that optionally expires. The grantee can use up to SpendLimit to cover fees. + */ +export interface BasicAllowanceSDKType { + spend_limit: CoinSDKType[]; + expiration?: Date; +} +/** + * PeriodicAllowance extends Allowance to allow for both a maximum cap, + * as well as a limit per time period. + */ +export interface PeriodicAllowance { + /** basic specifies a struct of `BasicAllowance` */ + basic?: BasicAllowance; + /** + * period specifies the time duration in which period_spend_limit coins can + * be spent before that allowance is reset + */ + period?: Duration; + /** + * period_spend_limit specifies the maximum number of coins that can be spent + * in the period + */ + periodSpendLimit: Coin[]; + /** period_can_spend is the number of coins left to be spent before the period_reset time */ + periodCanSpend: Coin[]; + /** + * period_reset is the time at which this period resets and a new one begins, + * it is calculated from the start time of the first transaction after the + * last period ended + */ + periodReset?: Date; +} +/** + * PeriodicAllowance extends Allowance to allow for both a maximum cap, + * as well as a limit per time period. + */ +export interface PeriodicAllowanceSDKType { + basic?: BasicAllowanceSDKType; + period?: DurationSDKType; + period_spend_limit: CoinSDKType[]; + period_can_spend: CoinSDKType[]; + period_reset?: Date; +} +/** AllowedMsgAllowance creates allowance only for specified message types. */ +export interface AllowedMsgAllowance { + /** allowance can be any of basic and periodic fee allowance. */ + allowance?: Any; + /** allowed_messages are the messages for which the grantee has the access. */ + allowedMessages: string[]; +} +/** AllowedMsgAllowance creates allowance only for specified message types. */ +export interface AllowedMsgAllowanceSDKType { + allowance?: AnySDKType; + allowed_messages: string[]; +} +/** Grant is stored in the KVStore to record a grant with full context */ +export interface Grant { + /** granter is the address of the user granting an allowance of their funds. 
*/ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + grantee: string; + /** allowance can be any of basic, periodic, allowed fee allowance. */ + allowance?: Any; +} +/** Grant is stored in the KVStore to record a grant with full context */ +export interface GrantSDKType { + granter: string; + grantee: string; + allowance?: AnySDKType; +} +export declare const BasicAllowance: { + encode(message: BasicAllowance, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BasicAllowance; + fromPartial(object: DeepPartial): BasicAllowance; +}; +export declare const PeriodicAllowance: { + encode(message: PeriodicAllowance, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PeriodicAllowance; + fromPartial(object: DeepPartial): PeriodicAllowance; +}; +export declare const AllowedMsgAllowance: { + encode(message: AllowedMsgAllowance, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AllowedMsgAllowance; + fromPartial(object: DeepPartial): AllowedMsgAllowance; +}; +export declare const Grant: { + encode(message: Grant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Grant; + fromPartial(object: DeepPartial): Grant; +}; diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/genesis.d.ts new file mode 100644 index 00000000..44faac46 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/genesis.d.ts @@ -0,0 +1,16 @@ +import { Grant, GrantSDKType } from "./feegrant"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState contains a set of fee allowances, persisted from the store */ +export interface GenesisState { + allowances: Grant[]; +} +/** GenesisState contains a set of fee allowances, persisted from the store */ +export interface GenesisStateSDKType { + allowances: GrantSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.d.ts new file mode 100644 index 00000000..8a0a3b58 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.d.ts @@ -0,0 +1,101 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Grant, GrantSDKType } from "./feegrant"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryAllowanceRequest is the request type for the Query/Allowance RPC method. */ +export interface QueryAllowanceRequest { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + grantee: string; +} +/** QueryAllowanceRequest is the request type for the Query/Allowance RPC method. */ +export interface QueryAllowanceRequestSDKType { + granter: string; + grantee: string; +} +/** QueryAllowanceResponse is the response type for the Query/Allowance RPC method. */ +export interface QueryAllowanceResponse { + /** allowance is a allowance granted for grantee by granter. 
*/ + allowance?: Grant; +} +/** QueryAllowanceResponse is the response type for the Query/Allowance RPC method. */ +export interface QueryAllowanceResponseSDKType { + allowance?: GrantSDKType; +} +/** QueryAllowancesRequest is the request type for the Query/Allowances RPC method. */ +export interface QueryAllowancesRequest { + grantee: string; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** QueryAllowancesRequest is the request type for the Query/Allowances RPC method. */ +export interface QueryAllowancesRequestSDKType { + grantee: string; + pagination?: PageRequestSDKType; +} +/** QueryAllowancesResponse is the response type for the Query/Allowances RPC method. */ +export interface QueryAllowancesResponse { + /** allowances are allowance's granted for grantee by granter. */ + allowances: Grant[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** QueryAllowancesResponse is the response type for the Query/Allowances RPC method. */ +export interface QueryAllowancesResponseSDKType { + allowances: GrantSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryAllowancesByGranterRequest is the request type for the Query/AllowancesByGranter RPC method. */ +export interface QueryAllowancesByGranterRequest { + granter: string; + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** QueryAllowancesByGranterRequest is the request type for the Query/AllowancesByGranter RPC method. */ +export interface QueryAllowancesByGranterRequestSDKType { + granter: string; + pagination?: PageRequestSDKType; +} +/** QueryAllowancesByGranterResponse is the response type for the Query/AllowancesByGranter RPC method. */ +export interface QueryAllowancesByGranterResponse { + /** allowances that have been issued by the granter. */ + allowances: Grant[]; + /** pagination defines an pagination for the response. */ + pagination?: PageResponse; +} +/** QueryAllowancesByGranterResponse is the response type for the Query/AllowancesByGranter RPC method. 
*/ +export interface QueryAllowancesByGranterResponseSDKType { + allowances: GrantSDKType[]; + pagination?: PageResponseSDKType; +} +export declare const QueryAllowanceRequest: { + encode(message: QueryAllowanceRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowanceRequest; + fromPartial(object: DeepPartial): QueryAllowanceRequest; +}; +export declare const QueryAllowanceResponse: { + encode(message: QueryAllowanceResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowanceResponse; + fromPartial(object: DeepPartial): QueryAllowanceResponse; +}; +export declare const QueryAllowancesRequest: { + encode(message: QueryAllowancesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesRequest; + fromPartial(object: DeepPartial): QueryAllowancesRequest; +}; +export declare const QueryAllowancesResponse: { + encode(message: QueryAllowancesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesResponse; + fromPartial(object: DeepPartial): QueryAllowancesResponse; +}; +export declare const QueryAllowancesByGranterRequest: { + encode(message: QueryAllowancesByGranterRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesByGranterRequest; + fromPartial(object: DeepPartial): QueryAllowancesByGranterRequest; +}; +export declare const QueryAllowancesByGranterResponse: { + encode(message: QueryAllowancesByGranterResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesByGranterResponse; + fromPartial(object: DeepPartial): QueryAllowancesByGranterResponse; +}; diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..7669c054 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryAllowanceRequest, QueryAllowanceResponseSDKType, QueryAllowancesRequest, QueryAllowancesResponseSDKType, QueryAllowancesByGranterRequest, QueryAllowancesByGranterResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + allowance(params: QueryAllowanceRequest): Promise; + allowances(params: QueryAllowancesRequest): Promise; + allowancesByGranter(params: QueryAllowancesByGranterRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..07185e44 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,27 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryAllowanceRequest, QueryAllowanceResponse, QueryAllowancesRequest, QueryAllowancesResponse, QueryAllowancesByGranterRequest, QueryAllowancesByGranterResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** Allowance returns fee granted to the grantee by the granter. */ + allowance(request: QueryAllowanceRequest): Promise; + /** Allowances returns all the grants for address. 
*/ + allowances(request: QueryAllowancesRequest): Promise; + /** + * AllowancesByGranter returns all the grants given by an address + * Since v0.46 + */ + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + allowance(request: QueryAllowanceRequest): Promise; + allowances(request: QueryAllowancesRequest): Promise; + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + allowance(request: QueryAllowanceRequest): Promise; + allowances(request: QueryAllowancesRequest): Promise; + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.d.ts new file mode 100644 index 00000000..9d6ffe92 --- /dev/null +++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.d.ts @@ -0,0 +1,68 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgGrantAllowance adds permission for Grantee to spend up to Allowance + * of fees from the account of Granter. + */ +export interface MsgGrantAllowance { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + grantee: string; + /** allowance can be any of basic, periodic, allowed fee allowance. */ + allowance?: Any; +} +/** + * MsgGrantAllowance adds permission for Grantee to spend up to Allowance + * of fees from the account of Granter. + */ +export interface MsgGrantAllowanceSDKType { + granter: string; + grantee: string; + allowance?: AnySDKType; +} +/** MsgGrantAllowanceResponse defines the Msg/GrantAllowanceResponse response type. */ +export interface MsgGrantAllowanceResponse { +} +/** MsgGrantAllowanceResponse defines the Msg/GrantAllowanceResponse response type. */ +export interface MsgGrantAllowanceResponseSDKType { +} +/** MsgRevokeAllowance removes any existing Allowance from Granter to Grantee. */ +export interface MsgRevokeAllowance { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + grantee: string; +} +/** MsgRevokeAllowance removes any existing Allowance from Granter to Grantee. */ +export interface MsgRevokeAllowanceSDKType { + granter: string; + grantee: string; +} +/** MsgRevokeAllowanceResponse defines the Msg/RevokeAllowanceResponse response type. */ +export interface MsgRevokeAllowanceResponse { +} +/** MsgRevokeAllowanceResponse defines the Msg/RevokeAllowanceResponse response type. 
*/
+export interface MsgRevokeAllowanceResponseSDKType {
+}
+export declare const MsgGrantAllowance: {
+ encode(message: MsgGrantAllowance, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantAllowance;
+ fromPartial(object: DeepPartial<MsgGrantAllowance>): MsgGrantAllowance;
+};
+export declare const MsgGrantAllowanceResponse: {
+ encode(_: MsgGrantAllowanceResponse, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantAllowanceResponse;
+ fromPartial(_: DeepPartial<MsgGrantAllowanceResponse>): MsgGrantAllowanceResponse;
+};
+export declare const MsgRevokeAllowance: {
+ encode(message: MsgRevokeAllowance, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeAllowance;
+ fromPartial(object: DeepPartial<MsgRevokeAllowance>): MsgRevokeAllowance;
+};
+export declare const MsgRevokeAllowanceResponse: {
+ encode(_: MsgRevokeAllowanceResponse, writer?: _m0.Writer): _m0.Writer;
+ decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeAllowanceResponse;
+ fromPartial(_: DeepPartial<MsgRevokeAllowanceResponse>): MsgRevokeAllowanceResponse;
+};
diff --git a/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.rpc.msg.d.ts
new file mode 100644
index 00000000..9ec43a55
--- /dev/null
+++ b/packages/codegen/dist/cosmos/feegrant/v1beta1/tx.rpc.msg.d.ts
@@ -0,0 +1,21 @@
+import { Rpc } from "../../../helpers";
+import { MsgGrantAllowance, MsgGrantAllowanceResponse, MsgRevokeAllowance, MsgRevokeAllowanceResponse } from "./tx";
+/** Msg defines the feegrant msg service. */
+export interface Msg {
+ /**
+ * GrantAllowance grants fee allowance to the grantee on the granter's
+ * account with the provided expiration time.
+ */
+ grantAllowance(request: MsgGrantAllowance): Promise<MsgGrantAllowanceResponse>;
+ /**
+ * RevokeAllowance revokes any fee allowance of granter's account that
+ * has been granted to the grantee.
+ */
+ revokeAllowance(request: MsgRevokeAllowance): Promise<MsgRevokeAllowanceResponse>;
+}
+export declare class MsgClientImpl implements Msg {
+ private readonly rpc;
+ constructor(rpc: Rpc);
+ grantAllowance(request: MsgGrantAllowance): Promise<MsgGrantAllowanceResponse>;
+ revokeAllowance(request: MsgRevokeAllowance): Promise<MsgRevokeAllowanceResponse>;
+}
diff --git a/packages/codegen/dist/cosmos/genutil/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/genutil/v1beta1/genesis.d.ts
new file mode 100644
index 00000000..a501140a
--- /dev/null
+++ b/packages/codegen/dist/cosmos/genutil/v1beta1/genesis.d.ts
@@ -0,0 +1,16 @@
+import * as _m0 from "protobufjs/minimal";
+import { DeepPartial } from "../../../helpers";
+/** GenesisState defines the raw genesis transaction in JSON. */
+export interface GenesisState {
+ /** gen_txs defines the genesis transactions. */
+ genTxs: Uint8Array[];
+}
+/** GenesisState defines the raw genesis transaction in JSON.
*/ +export interface GenesisStateSDKType { + gen_txs: Uint8Array[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/genesis.d.ts b/packages/codegen/dist/cosmos/gov/v1/genesis.d.ts new file mode 100644 index 00000000..7d690f57 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/genesis.d.ts @@ -0,0 +1,36 @@ +/// +import { Deposit, DepositSDKType, Vote, VoteSDKType, Proposal, ProposalSDKType, DepositParams, DepositParamsSDKType, VotingParams, VotingParamsSDKType, TallyParams, TallyParamsSDKType } from "./gov"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the gov module's genesis state. */ +export interface GenesisState { + /** starting_proposal_id is the ID of the starting proposal. */ + startingProposalId: Long; + /** deposits defines all the deposits present at genesis. */ + deposits: Deposit[]; + /** votes defines all the votes present at genesis. */ + votes: Vote[]; + /** proposals defines all the proposals present at genesis. */ + proposals: Proposal[]; + /** params defines all the paramaters of related to deposit. */ + depositParams?: DepositParams; + /** params defines all the paramaters of related to voting. */ + votingParams?: VotingParams; + /** params defines all the paramaters of related to tally. */ + tallyParams?: TallyParams; +} +/** GenesisState defines the gov module's genesis state. */ +export interface GenesisStateSDKType { + starting_proposal_id: Long; + deposits: DepositSDKType[]; + votes: VoteSDKType[]; + proposals: ProposalSDKType[]; + deposit_params?: DepositParamsSDKType; + voting_params?: VotingParamsSDKType; + tally_params?: TallyParamsSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/gov.d.ts b/packages/codegen/dist/cosmos/gov/v1/gov.d.ts new file mode 100644 index 00000000..cd18af32 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/gov.d.ts @@ -0,0 +1,237 @@ +/// +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given governance proposal. */ +export declare enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. */ + VOTE_OPTION_ABSTAIN = 2, + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. 
*/ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1 +} +export declare const VoteOptionSDKType: typeof VoteOption; +export declare function voteOptionFromJSON(object: any): VoteOption; +export declare function voteOptionToJSON(object: VoteOption): string; +/** ProposalStatus enumerates the valid statuses of a proposal. */ +export declare enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + /** + * PROPOSAL_STATUS_DEPOSIT_PERIOD - PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + * period. + */ + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1, + /** + * PROPOSAL_STATUS_VOTING_PERIOD - PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + * period. + */ + PROPOSAL_STATUS_VOTING_PERIOD = 2, + /** + * PROPOSAL_STATUS_PASSED - PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + * passed. + */ + PROPOSAL_STATUS_PASSED = 3, + /** + * PROPOSAL_STATUS_REJECTED - PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + * been rejected. + */ + PROPOSAL_STATUS_REJECTED = 4, + /** + * PROPOSAL_STATUS_FAILED - PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + * failed. + */ + PROPOSAL_STATUS_FAILED = 5, + UNRECOGNIZED = -1 +} +export declare const ProposalStatusSDKType: typeof ProposalStatus; +export declare function proposalStatusFromJSON(object: any): ProposalStatus; +export declare function proposalStatusToJSON(object: ProposalStatus): string; +/** WeightedVoteOption defines a unit of vote for vote split. */ +export interface WeightedVoteOption { + option: VoteOption; + weight: string; +} +/** WeightedVoteOption defines a unit of vote for vote split. */ +export interface WeightedVoteOptionSDKType { + option: VoteOption; + weight: string; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ +export interface Deposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ +export interface DepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** Proposal defines the core field members of a governance proposal. */ +export interface Proposal { + id: Long; + messages: Any[]; + status: ProposalStatus; + /** + * final_tally_result is the final tally result of the proposal. When + * querying a proposal via gRPC, this field is not populated until the + * proposal's voting period has ended. + */ + finalTallyResult?: TallyResult; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit: Coin[]; + votingStartTime?: Date; + votingEndTime?: Date; + /** metadata is any arbitrary metadata attached to the proposal. */ + metadata: string; +} +/** Proposal defines the core field members of a governance proposal. */ +export interface ProposalSDKType { + id: Long; + messages: AnySDKType[]; + status: ProposalStatus; + final_tally_result?: TallyResultSDKType; + submit_time?: Date; + deposit_end_time?: Date; + total_deposit: CoinSDKType[]; + voting_start_time?: Date; + voting_end_time?: Date; + metadata: string; +} +/** TallyResult defines a standard tally for a governance proposal. */ +export interface TallyResult { + yesCount: string; + abstainCount: string; + noCount: string; + noWithVetoCount: string; +} +/** TallyResult defines a standard tally for a governance proposal. 
*/ +export interface TallyResultSDKType { + yes_count: string; + abstain_count: string; + no_count: string; + no_with_veto_count: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ +export interface Vote { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; + /** metadata is any arbitrary metadata to attached to the vote. */ + metadata: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ +export interface VoteSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; + metadata: string; +} +/** DepositParams defines the params for deposits on governance proposals. */ +export interface DepositParams { + /** Minimum deposit for a proposal to enter voting period. */ + minDeposit: Coin[]; + /** + * Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + * months. + */ + maxDepositPeriod?: Duration; +} +/** DepositParams defines the params for deposits on governance proposals. */ +export interface DepositParamsSDKType { + min_deposit: CoinSDKType[]; + max_deposit_period?: DurationSDKType; +} +/** VotingParams defines the params for voting on governance proposals. */ +export interface VotingParams { + /** Length of the voting period. */ + votingPeriod?: Duration; +} +/** VotingParams defines the params for voting on governance proposals. */ +export interface VotingParamsSDKType { + voting_period?: DurationSDKType; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ +export interface TallyParams { + /** + * Minimum percentage of total stake needed to vote for a result to be + * considered valid. + */ + quorum: string; + /** Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. */ + threshold: string; + /** + * Minimum value of Veto votes to Total votes ratio for proposal to be + * vetoed. Default value: 1/3. + */ + vetoThreshold: string; +} +/** TallyParams defines the params for tallying votes on governance proposals. 
*/ +export interface TallyParamsSDKType { + quorum: string; + threshold: string; + veto_threshold: string; +} +export declare const WeightedVoteOption: { + encode(message: WeightedVoteOption, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): WeightedVoteOption; + fromPartial(object: DeepPartial): WeightedVoteOption; +}; +export declare const Deposit: { + encode(message: Deposit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Deposit; + fromPartial(object: DeepPartial): Deposit; +}; +export declare const Proposal: { + encode(message: Proposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal; + fromPartial(object: DeepPartial): Proposal; +}; +export declare const TallyResult: { + encode(message: TallyResult, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult; + fromPartial(object: DeepPartial): TallyResult; +}; +export declare const Vote: { + encode(message: Vote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Vote; + fromPartial(object: DeepPartial): Vote; +}; +export declare const DepositParams: { + encode(message: DepositParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DepositParams; + fromPartial(object: DeepPartial): DepositParams; +}; +export declare const VotingParams: { + encode(message: VotingParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): VotingParams; + fromPartial(object: DeepPartial): VotingParams; +}; +export declare const TallyParams: { + encode(message: TallyParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TallyParams; + fromPartial(object: DeepPartial): TallyParams; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/query.d.ts b/packages/codegen/dist/cosmos/gov/v1/query.d.ts new file mode 100644 index 00000000..05747b9a --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/query.d.ts @@ -0,0 +1,271 @@ +/// +import { ProposalStatus, Proposal, ProposalSDKType, Vote, VoteSDKType, VotingParams, VotingParamsSDKType, DepositParams, DepositParamsSDKType, TallyParams, TallyParamsSDKType, Deposit, DepositSDKType, TallyResult, TallyResultSDKType } from "./gov"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ +export interface QueryProposalRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ +export interface QueryProposalResponse { + proposal?: Proposal; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ +export interface QueryProposalsRequest { + /** proposal_status defines the status of the proposals. 
*/ + proposalStatus: ProposalStatus; + /** voter defines the voter address for the proposals. */ + voter: string; + /** depositor defines the deposit addresses from the proposals. */ + depositor: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ +export interface QueryProposalsRequestSDKType { + proposal_status: ProposalStatus; + voter: string; + depositor: string; + pagination?: PageRequestSDKType; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ +export interface QueryProposalsResponse { + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ +export interface QueryProposalsResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ +export interface QueryVoteRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** voter defines the oter address for the proposals. */ + voter: string; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ +export interface QueryVoteRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ +export interface QueryVoteResponse { + /** vote defined the queried vote. */ + vote?: Vote; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ +export interface QueryVoteResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ +export interface QueryVotesRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ +export interface QueryVotesRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ +export interface QueryVotesResponse { + /** votes defined the queried votes. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ +export interface QueryVotesResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { + /** + * params_type defines which parameters to query for, can be one of "voting", + * "tallying" or "deposit". + */ + paramsType: string; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { + params_type: string; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** voting_params defines the parameters related to voting. */ + votingParams?: VotingParams; + /** deposit_params defines the parameters related to deposit. */ + depositParams?: DepositParams; + /** tally_params defines the parameters related to tally. 
*/ + tallyParams?: TallyParams; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + voting_params?: VotingParamsSDKType; + deposit_params?: DepositParamsSDKType; + tally_params?: TallyParamsSDKType; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ +export interface QueryDepositRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** depositor defines the deposit addresses from the proposals. */ + depositor: string; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ +export interface QueryDepositRequestSDKType { + proposal_id: Long; + depositor: string; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ +export interface QueryDepositResponse { + /** deposit defines the requested deposit. */ + deposit?: Deposit; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ +export interface QueryDepositResponseSDKType { + deposit?: DepositSDKType; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ +export interface QueryDepositsRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ +export interface QueryDepositsRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ +export interface QueryDepositsResponse { + deposits: Deposit[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ +export interface QueryDepositsResponseSDKType { + deposits: DepositSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ +export interface QueryTallyResultRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. 
*/ +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} +export declare const QueryProposalRequest: { + encode(message: QueryProposalRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest; + fromPartial(object: DeepPartial): QueryProposalRequest; +}; +export declare const QueryProposalResponse: { + encode(message: QueryProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse; + fromPartial(object: DeepPartial): QueryProposalResponse; +}; +export declare const QueryProposalsRequest: { + encode(message: QueryProposalsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsRequest; + fromPartial(object: DeepPartial): QueryProposalsRequest; +}; +export declare const QueryProposalsResponse: { + encode(message: QueryProposalsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsResponse; + fromPartial(object: DeepPartial): QueryProposalsResponse; +}; +export declare const QueryVoteRequest: { + encode(message: QueryVoteRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteRequest; + fromPartial(object: DeepPartial): QueryVoteRequest; +}; +export declare const QueryVoteResponse: { + encode(message: QueryVoteResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteResponse; + fromPartial(object: DeepPartial): QueryVoteResponse; +}; +export declare const QueryVotesRequest: { + encode(message: QueryVotesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesRequest; + fromPartial(object: DeepPartial): QueryVotesRequest; +}; +export declare const QueryVotesResponse: { + encode(message: QueryVotesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesResponse; + fromPartial(object: DeepPartial): QueryVotesResponse; +}; +export declare const QueryParamsRequest: { + encode(message: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(object: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryDepositRequest: { + encode(message: QueryDepositRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositRequest; + fromPartial(object: DeepPartial): QueryDepositRequest; +}; +export declare const QueryDepositResponse: { + encode(message: QueryDepositResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositResponse; + fromPartial(object: DeepPartial): QueryDepositResponse; +}; +export declare const QueryDepositsRequest: { + encode(message: QueryDepositsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsRequest; + fromPartial(object: DeepPartial): QueryDepositsRequest; +}; +export declare const QueryDepositsResponse: { + encode(message: QueryDepositsResponse, 
writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsResponse; + fromPartial(object: DeepPartial): QueryDepositsResponse; +}; +export declare const QueryTallyResultRequest: { + encode(message: QueryTallyResultRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest; + fromPartial(object: DeepPartial): QueryTallyResultRequest; +}; +export declare const QueryTallyResultResponse: { + encode(message: QueryTallyResultResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse; + fromPartial(object: DeepPartial): QueryTallyResultResponse; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/query.lcd.d.ts b/packages/codegen/dist/cosmos/gov/v1/query.lcd.d.ts new file mode 100644 index 00000000..9ce44b6a --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/query.lcd.d.ts @@ -0,0 +1,16 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsRequest, QueryProposalsResponseSDKType, QueryVoteRequest, QueryVoteResponseSDKType, QueryVotesRequest, QueryVotesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDepositRequest, QueryDepositResponseSDKType, QueryDepositsRequest, QueryDepositsResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + proposal(params: QueryProposalRequest): Promise; + proposals(params: QueryProposalsRequest): Promise; + vote(params: QueryVoteRequest): Promise; + votes(params: QueryVotesRequest): Promise; + params(params: QueryParamsRequest): Promise; + deposit(params: QueryDepositRequest): Promise; + deposits(params: QueryDepositsRequest): Promise; + tallyResult(params: QueryTallyResultRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/gov/v1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/gov/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..8152da58 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/query.rpc.Query.d.ts @@ -0,0 +1,44 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryProposalRequest, QueryProposalResponse, QueryProposalsRequest, QueryProposalsResponse, QueryVoteRequest, QueryVoteResponse, QueryVotesRequest, QueryVotesResponse, QueryParamsRequest, QueryParamsResponse, QueryDepositRequest, QueryDepositResponse, QueryDepositsRequest, QueryDepositsResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query defines the gRPC querier service for gov module */ +export interface Query { + /** Proposal queries proposal details based on ProposalID. */ + proposal(request: QueryProposalRequest): Promise; + /** Proposals queries all proposals based on given status. */ + proposals(request: QueryProposalsRequest): Promise; + /** Vote queries voted information based on proposalID, voterAddr. */ + vote(request: QueryVoteRequest): Promise; + /** Votes queries votes of a given proposal. */ + votes(request: QueryVotesRequest): Promise; + /** Params queries all parameters of the gov module. */ + params(request: QueryParamsRequest): Promise; + /** Deposit queries single deposit information based proposalID, depositAddr. */ + deposit(request: QueryDepositRequest): Promise; + /** Deposits queries all deposits of a single proposal. 
*/ + deposits(request: QueryDepositsRequest): Promise; + /** TallyResult queries the tally of a proposal vote. */ + tallyResult(request: QueryTallyResultRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + proposal(request: QueryProposalRequest): Promise; + proposals(request: QueryProposalsRequest): Promise; + vote(request: QueryVoteRequest): Promise; + votes(request: QueryVotesRequest): Promise; + params(request: QueryParamsRequest): Promise; + deposit(request: QueryDepositRequest): Promise; + deposits(request: QueryDepositsRequest): Promise; + tallyResult(request: QueryTallyResultRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + proposal(request: QueryProposalRequest): Promise; + proposals(request: QueryProposalsRequest): Promise; + vote(request: QueryVoteRequest): Promise; + votes(request: QueryVotesRequest): Promise; + params(request: QueryParamsRequest): Promise; + deposit(request: QueryDepositRequest): Promise; + deposits(request: QueryDepositsRequest): Promise; + tallyResult(request: QueryTallyResultRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/tx.d.ts b/packages/codegen/dist/cosmos/gov/v1/tx.d.ts new file mode 100644 index 00000000..62602193 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/tx.d.ts @@ -0,0 +1,167 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { VoteOption, WeightedVoteOption, WeightedVoteOptionSDKType } from "./gov"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ +export interface MsgSubmitProposal { + messages: Any[]; + initialDeposit: Coin[]; + proposer: string; + /** metadata is any arbitrary metadata attached to the proposal. */ + metadata: string; +} +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ +export interface MsgSubmitProposalSDKType { + messages: AnySDKType[]; + initial_deposit: CoinSDKType[]; + proposer: string; + metadata: string; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponse { + proposalId: Long; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** + * MsgExecLegacyContent is used to wrap the legacy content field into a message. + * This ensures backwards compatibility with v1beta1.MsgSubmitProposal. + */ +export interface MsgExecLegacyContent { + /** content is the proposal's content. */ + content?: Any; + /** authority must be the gov module address. */ + authority: string; +} +/** + * MsgExecLegacyContent is used to wrap the legacy content field into a message. + * This ensures backwards compatibility with v1beta1.MsgSubmitProposal. + */ +export interface MsgExecLegacyContentSDKType { + content?: AnySDKType; + authority: string; +} +/** MsgExecLegacyContentResponse defines the Msg/ExecLegacyContent response type. */ +export interface MsgExecLegacyContentResponse { +} +/** MsgExecLegacyContentResponse defines the Msg/ExecLegacyContent response type. */ +export interface MsgExecLegacyContentResponseSDKType { +} +/** MsgVote defines a message to cast a vote. 
*/ +export interface MsgVote { + proposalId: Long; + voter: string; + option: VoteOption; + metadata: string; +} +/** MsgVote defines a message to cast a vote. */ +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; +} +/** MsgVoteResponse defines the Msg/Vote response type. */ +export interface MsgVoteResponse { +} +/** MsgVoteResponse defines the Msg/Vote response type. */ +export interface MsgVoteResponseSDKType { +} +/** MsgVoteWeighted defines a message to cast a vote. */ +export interface MsgVoteWeighted { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; + metadata: string; +} +/** MsgVoteWeighted defines a message to cast a vote. */ +export interface MsgVoteWeightedSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; + metadata: string; +} +/** MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. */ +export interface MsgVoteWeightedResponse { +} +/** MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. */ +export interface MsgVoteWeightedResponseSDKType { +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ +export interface MsgDeposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ +export interface MsgDepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ +export interface MsgDepositResponse { +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ +export interface MsgDepositResponseSDKType { +} +export declare const MsgSubmitProposal: { + encode(message: MsgSubmitProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal; + fromPartial(object: DeepPartial): MsgSubmitProposal; +}; +export declare const MsgSubmitProposalResponse: { + encode(message: MsgSubmitProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse; + fromPartial(object: DeepPartial): MsgSubmitProposalResponse; +}; +export declare const MsgExecLegacyContent: { + encode(message: MsgExecLegacyContent, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecLegacyContent; + fromPartial(object: DeepPartial): MsgExecLegacyContent; +}; +export declare const MsgExecLegacyContentResponse: { + encode(_: MsgExecLegacyContentResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecLegacyContentResponse; + fromPartial(_: DeepPartial): MsgExecLegacyContentResponse; +}; +export declare const MsgVote: { + encode(message: MsgVote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote; + fromPartial(object: DeepPartial): MsgVote; +}; +export declare const MsgVoteResponse: { + encode(_: MsgVoteResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse; + fromPartial(_: DeepPartial): MsgVoteResponse; +}; +export declare const MsgVoteWeighted: { + encode(message: MsgVoteWeighted, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeighted; + fromPartial(object: DeepPartial): MsgVoteWeighted; +}; +export declare const MsgVoteWeightedResponse: { + encode(_: 
MsgVoteWeightedResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeightedResponse; + fromPartial(_: DeepPartial): MsgVoteWeightedResponse; +}; +export declare const MsgDeposit: { + encode(message: MsgDeposit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeposit; + fromPartial(object: DeepPartial): MsgDeposit; +}; +export declare const MsgDepositResponse: { + encode(_: MsgDepositResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositResponse; + fromPartial(_: DeepPartial): MsgDepositResponse; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/gov/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..4624c3f7 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1/tx.rpc.msg.d.ts @@ -0,0 +1,27 @@ +import { Rpc } from "../../../helpers"; +import { MsgSubmitProposal, MsgSubmitProposalResponse, MsgExecLegacyContent, MsgExecLegacyContentResponse, MsgVote, MsgVoteResponse, MsgVoteWeighted, MsgVoteWeightedResponse, MsgDeposit, MsgDepositResponse } from "./tx"; +/** Msg defines the gov Msg service. */ +export interface Msg { + /** SubmitProposal defines a method to create new proposal given a content. */ + submitProposal(request: MsgSubmitProposal): Promise; + /** + * ExecLegacyContent defines a Msg to be in included in a MsgSubmitProposal + * to execute a legacy content-based proposal. + */ + execLegacyContent(request: MsgExecLegacyContent): Promise; + /** Vote defines a method to add a vote on a specific proposal. */ + vote(request: MsgVote): Promise; + /** VoteWeighted defines a method to add a weighted vote on a specific proposal. */ + voteWeighted(request: MsgVoteWeighted): Promise; + /** Deposit defines a method to add deposit on a specific proposal. */ + deposit(request: MsgDeposit): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + submitProposal(request: MsgSubmitProposal): Promise; + execLegacyContent(request: MsgExecLegacyContent): Promise; + vote(request: MsgVote): Promise; + voteWeighted(request: MsgVoteWeighted): Promise; + deposit(request: MsgDeposit): Promise; +} diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/genesis.d.ts new file mode 100644 index 00000000..7d690f57 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/genesis.d.ts @@ -0,0 +1,36 @@ +/// +import { Deposit, DepositSDKType, Vote, VoteSDKType, Proposal, ProposalSDKType, DepositParams, DepositParamsSDKType, VotingParams, VotingParamsSDKType, TallyParams, TallyParamsSDKType } from "./gov"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the gov module's genesis state. */ +export interface GenesisState { + /** starting_proposal_id is the ID of the starting proposal. */ + startingProposalId: Long; + /** deposits defines all the deposits present at genesis. */ + deposits: Deposit[]; + /** votes defines all the votes present at genesis. */ + votes: Vote[]; + /** proposals defines all the proposals present at genesis. */ + proposals: Proposal[]; + /** params defines all the paramaters of related to deposit. */ + depositParams?: DepositParams; + /** params defines all the paramaters of related to voting. */ + votingParams?: VotingParams; + /** params defines all the paramaters of related to tally. 
*/ + tallyParams?: TallyParams; +} +/** GenesisState defines the gov module's genesis state. */ +export interface GenesisStateSDKType { + starting_proposal_id: Long; + deposits: DepositSDKType[]; + votes: VoteSDKType[]; + proposals: ProposalSDKType[]; + deposit_params?: DepositParamsSDKType; + voting_params?: VotingParamsSDKType; + tally_params?: TallyParamsSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/gov.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/gov.d.ts new file mode 100644 index 00000000..6aff75ed --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/gov.d.ts @@ -0,0 +1,270 @@ +/// +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given governance proposal. */ +export declare enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. */ + VOTE_OPTION_ABSTAIN = 2, + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. */ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1 +} +export declare const VoteOptionSDKType: typeof VoteOption; +export declare function voteOptionFromJSON(object: any): VoteOption; +export declare function voteOptionToJSON(object: VoteOption): string; +/** ProposalStatus enumerates the valid statuses of a proposal. */ +export declare enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + /** + * PROPOSAL_STATUS_DEPOSIT_PERIOD - PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + * period. + */ + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1, + /** + * PROPOSAL_STATUS_VOTING_PERIOD - PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + * period. + */ + PROPOSAL_STATUS_VOTING_PERIOD = 2, + /** + * PROPOSAL_STATUS_PASSED - PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + * passed. + */ + PROPOSAL_STATUS_PASSED = 3, + /** + * PROPOSAL_STATUS_REJECTED - PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + * been rejected. + */ + PROPOSAL_STATUS_REJECTED = 4, + /** + * PROPOSAL_STATUS_FAILED - PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + * failed. + */ + PROPOSAL_STATUS_FAILED = 5, + UNRECOGNIZED = -1 +} +export declare const ProposalStatusSDKType: typeof ProposalStatus; +export declare function proposalStatusFromJSON(object: any): ProposalStatus; +export declare function proposalStatusToJSON(object: ProposalStatus): string; +/** + * WeightedVoteOption defines a unit of vote for vote split. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface WeightedVoteOption { + option: VoteOption; + weight: string; +} +/** + * WeightedVoteOption defines a unit of vote for vote split. + * + * Since: cosmos-sdk 0.43 + */ +export interface WeightedVoteOptionSDKType { + option: VoteOption; + weight: string; +} +/** + * TextProposal defines a standard text proposal whose changes need to be + * manually updated in case of approval. + */ +export interface TextProposal { + title: string; + description: string; +} +/** + * TextProposal defines a standard text proposal whose changes need to be + * manually updated in case of approval. + */ +export interface TextProposalSDKType { + title: string; + description: string; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ +export interface Deposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ +export interface DepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** Proposal defines the core field members of a governance proposal. */ +export interface Proposal { + proposalId: Long; + content?: Any; + status: ProposalStatus; + /** + * final_tally_result is the final tally result of the proposal. When + * querying a proposal via gRPC, this field is not populated until the + * proposal's voting period has ended. + */ + finalTallyResult?: TallyResult; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit: Coin[]; + votingStartTime?: Date; + votingEndTime?: Date; +} +/** Proposal defines the core field members of a governance proposal. */ +export interface ProposalSDKType { + proposal_id: Long; + content?: AnySDKType; + status: ProposalStatus; + final_tally_result?: TallyResultSDKType; + submit_time?: Date; + deposit_end_time?: Date; + total_deposit: CoinSDKType[]; + voting_start_time?: Date; + voting_end_time?: Date; +} +/** TallyResult defines a standard tally for a governance proposal. */ +export interface TallyResult { + yes: string; + abstain: string; + no: string; + noWithVeto: string; +} +/** TallyResult defines a standard tally for a governance proposal. */ +export interface TallyResultSDKType { + yes: string; + abstain: string; + no: string; + no_with_veto: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ +export interface Vote { + proposalId: Long; + voter: string; + /** + * Deprecated: Prefer to use `options` instead. This field is set in queries + * if and only if `len(options) == 1` and that option has weight 1. In all + * other cases, this field will default to VOTE_OPTION_UNSPECIFIED. + */ + /** @deprecated */ + option: VoteOption; + /** Since: cosmos-sdk 0.43 */ + options: WeightedVoteOption[]; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ +export interface VoteSDKType { + proposal_id: Long; + voter: string; + /** @deprecated */ + option: VoteOption; + options: WeightedVoteOptionSDKType[]; +} +/** DepositParams defines the params for deposits on governance proposals. */ +export interface DepositParams { + /** Minimum deposit for a proposal to enter voting period. */ + minDeposit: Coin[]; + /** + * Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + * months. 
+ */ + maxDepositPeriod?: Duration; +} +/** DepositParams defines the params for deposits on governance proposals. */ +export interface DepositParamsSDKType { + min_deposit: CoinSDKType[]; + max_deposit_period?: DurationSDKType; +} +/** VotingParams defines the params for voting on governance proposals. */ +export interface VotingParams { + /** Length of the voting period. */ + votingPeriod?: Duration; +} +/** VotingParams defines the params for voting on governance proposals. */ +export interface VotingParamsSDKType { + voting_period?: DurationSDKType; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ +export interface TallyParams { + /** + * Minimum percentage of total stake needed to vote for a result to be + * considered valid. + */ + quorum: Uint8Array; + /** Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. */ + threshold: Uint8Array; + /** + * Minimum value of Veto votes to Total votes ratio for proposal to be + * vetoed. Default value: 1/3. + */ + vetoThreshold: Uint8Array; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ +export interface TallyParamsSDKType { + quorum: Uint8Array; + threshold: Uint8Array; + veto_threshold: Uint8Array; +} +export declare const WeightedVoteOption: { + encode(message: WeightedVoteOption, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): WeightedVoteOption; + fromPartial(object: DeepPartial): WeightedVoteOption; +}; +export declare const TextProposal: { + encode(message: TextProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TextProposal; + fromPartial(object: DeepPartial): TextProposal; +}; +export declare const Deposit: { + encode(message: Deposit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Deposit; + fromPartial(object: DeepPartial): Deposit; +}; +export declare const Proposal: { + encode(message: Proposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal; + fromPartial(object: DeepPartial): Proposal; +}; +export declare const TallyResult: { + encode(message: TallyResult, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult; + fromPartial(object: DeepPartial): TallyResult; +}; +export declare const Vote: { + encode(message: Vote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Vote; + fromPartial(object: DeepPartial): Vote; +}; +export declare const DepositParams: { + encode(message: DepositParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DepositParams; + fromPartial(object: DeepPartial): DepositParams; +}; +export declare const VotingParams: { + encode(message: VotingParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): VotingParams; + fromPartial(object: DeepPartial): VotingParams; +}; +export declare const TallyParams: { + encode(message: TallyParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TallyParams; + fromPartial(object: DeepPartial): TallyParams; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/query.d.ts new file mode 100644 index 00000000..05747b9a --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/query.d.ts @@ -0,0 +1,271 @@ +/// 
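// Illustrative sketch (editorial note, not generated output): the codec objects declared
// above for the v1beta1 gov types (TextProposal, Deposit, Proposal, Vote, ...) all share the
// same encode/decode/fromPartial shape. The import path below is a placeholder and depends
// on how this package is consumed.
import { TextProposal } from "./cosmos/gov/v1beta1/gov";

const proposal = TextProposal.fromPartial({
  title: "Parameter change",
  description: "Example text proposal",
});
const bytes = TextProposal.encode(proposal).finish(); // protobuf wire bytes (Uint8Array)
const decoded = TextProposal.decode(bytes);           // round-trips back to a TextProposal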
+import { ProposalStatus, Proposal, ProposalSDKType, Vote, VoteSDKType, VotingParams, VotingParamsSDKType, DepositParams, DepositParamsSDKType, TallyParams, TallyParamsSDKType, Deposit, DepositSDKType, TallyResult, TallyResultSDKType } from "./gov"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ +export interface QueryProposalRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ +export interface QueryProposalResponse { + proposal?: Proposal; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ +export interface QueryProposalsRequest { + /** proposal_status defines the status of the proposals. */ + proposalStatus: ProposalStatus; + /** voter defines the voter address for the proposals. */ + voter: string; + /** depositor defines the deposit addresses from the proposals. */ + depositor: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ +export interface QueryProposalsRequestSDKType { + proposal_status: ProposalStatus; + voter: string; + depositor: string; + pagination?: PageRequestSDKType; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ +export interface QueryProposalsResponse { + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ +export interface QueryProposalsResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ +export interface QueryVoteRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** voter defines the oter address for the proposals. */ + voter: string; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ +export interface QueryVoteRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ +export interface QueryVoteResponse { + /** vote defined the queried vote. */ + vote?: Vote; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ +export interface QueryVoteResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ +export interface QueryVotesRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. 
*/ +export interface QueryVotesRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ +export interface QueryVotesResponse { + /** votes defined the queried votes. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ +export interface QueryVotesResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { + /** + * params_type defines which parameters to query for, can be one of "voting", + * "tallying" or "deposit". + */ + paramsType: string; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { + params_type: string; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** voting_params defines the parameters related to voting. */ + votingParams?: VotingParams; + /** deposit_params defines the parameters related to deposit. */ + depositParams?: DepositParams; + /** tally_params defines the parameters related to tally. */ + tallyParams?: TallyParams; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + voting_params?: VotingParamsSDKType; + deposit_params?: DepositParamsSDKType; + tally_params?: TallyParamsSDKType; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ +export interface QueryDepositRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** depositor defines the deposit addresses from the proposals. */ + depositor: string; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ +export interface QueryDepositRequestSDKType { + proposal_id: Long; + depositor: string; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ +export interface QueryDepositResponse { + /** deposit defines the requested deposit. */ + deposit?: Deposit; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ +export interface QueryDepositResponseSDKType { + deposit?: DepositSDKType; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ +export interface QueryDepositsRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ +export interface QueryDepositsRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ +export interface QueryDepositsResponse { + deposits: Deposit[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ +export interface QueryDepositsResponseSDKType { + deposits: DepositSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. 
*/ +export interface QueryTallyResultRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} +export declare const QueryProposalRequest: { + encode(message: QueryProposalRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest; + fromPartial(object: DeepPartial): QueryProposalRequest; +}; +export declare const QueryProposalResponse: { + encode(message: QueryProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse; + fromPartial(object: DeepPartial): QueryProposalResponse; +}; +export declare const QueryProposalsRequest: { + encode(message: QueryProposalsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsRequest; + fromPartial(object: DeepPartial): QueryProposalsRequest; +}; +export declare const QueryProposalsResponse: { + encode(message: QueryProposalsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsResponse; + fromPartial(object: DeepPartial): QueryProposalsResponse; +}; +export declare const QueryVoteRequest: { + encode(message: QueryVoteRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteRequest; + fromPartial(object: DeepPartial): QueryVoteRequest; +}; +export declare const QueryVoteResponse: { + encode(message: QueryVoteResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteResponse; + fromPartial(object: DeepPartial): QueryVoteResponse; +}; +export declare const QueryVotesRequest: { + encode(message: QueryVotesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesRequest; + fromPartial(object: DeepPartial): QueryVotesRequest; +}; +export declare const QueryVotesResponse: { + encode(message: QueryVotesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesResponse; + fromPartial(object: DeepPartial): QueryVotesResponse; +}; +export declare const QueryParamsRequest: { + encode(message: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(object: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryDepositRequest: { + encode(message: QueryDepositRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositRequest; + fromPartial(object: DeepPartial): QueryDepositRequest; +}; +export declare const QueryDepositResponse: { + 
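// Illustrative sketch (editorial note, not generated output): request messages are built
// with fromPartial, which accepts partial input (Long ids, nested pagination), and can be
// passed to the generated LCD client declared in query.lcd.d.ts below. The import paths and
// the pre-constructed `requestClient` are assumptions.
import Long from "long";
import { LCDClient } from "@osmonauts/lcd";
import { QueryDepositsRequest } from "./cosmos/gov/v1beta1/query";
import { LCDQueryClient } from "./cosmos/gov/v1beta1/query.lcd";

async function fetchDeposits(requestClient: LCDClient) {
  const client = new LCDQueryClient({ requestClient });
  const request = QueryDepositsRequest.fromPartial({
    proposalId: Long.fromNumber(1),
    pagination: { limit: Long.fromNumber(50) },
  });
  return client.deposits(request); // resolves to a QueryDepositsResponseSDKType
}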
encode(message: QueryDepositResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositResponse; + fromPartial(object: DeepPartial): QueryDepositResponse; +}; +export declare const QueryDepositsRequest: { + encode(message: QueryDepositsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsRequest; + fromPartial(object: DeepPartial): QueryDepositsRequest; +}; +export declare const QueryDepositsResponse: { + encode(message: QueryDepositsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsResponse; + fromPartial(object: DeepPartial): QueryDepositsResponse; +}; +export declare const QueryTallyResultRequest: { + encode(message: QueryTallyResultRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest; + fromPartial(object: DeepPartial): QueryTallyResultRequest; +}; +export declare const QueryTallyResultResponse: { + encode(message: QueryTallyResultResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse; + fromPartial(object: DeepPartial): QueryTallyResultResponse; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..9ce44b6a --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/query.lcd.d.ts @@ -0,0 +1,16 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsRequest, QueryProposalsResponseSDKType, QueryVoteRequest, QueryVoteResponseSDKType, QueryVotesRequest, QueryVotesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDepositRequest, QueryDepositResponseSDKType, QueryDepositsRequest, QueryDepositsResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + proposal(params: QueryProposalRequest): Promise; + proposals(params: QueryProposalsRequest): Promise; + vote(params: QueryVoteRequest): Promise; + votes(params: QueryVotesRequest): Promise; + params(params: QueryParamsRequest): Promise; + deposit(params: QueryDepositRequest): Promise; + deposits(params: QueryDepositsRequest): Promise; + tallyResult(params: QueryTallyResultRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..8152da58 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,44 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryProposalRequest, QueryProposalResponse, QueryProposalsRequest, QueryProposalsResponse, QueryVoteRequest, QueryVoteResponse, QueryVotesRequest, QueryVotesResponse, QueryParamsRequest, QueryParamsResponse, QueryDepositRequest, QueryDepositResponse, QueryDepositsRequest, QueryDepositsResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query defines the gRPC querier service for gov module */ +export interface Query { + /** Proposal queries proposal details based on ProposalID. 
*/ + proposal(request: QueryProposalRequest): Promise; + /** Proposals queries all proposals based on given status. */ + proposals(request: QueryProposalsRequest): Promise; + /** Vote queries voted information based on proposalID, voterAddr. */ + vote(request: QueryVoteRequest): Promise; + /** Votes queries votes of a given proposal. */ + votes(request: QueryVotesRequest): Promise; + /** Params queries all parameters of the gov module. */ + params(request: QueryParamsRequest): Promise; + /** Deposit queries single deposit information based proposalID, depositAddr. */ + deposit(request: QueryDepositRequest): Promise; + /** Deposits queries all deposits of a single proposal. */ + deposits(request: QueryDepositsRequest): Promise; + /** TallyResult queries the tally of a proposal vote. */ + tallyResult(request: QueryTallyResultRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + proposal(request: QueryProposalRequest): Promise; + proposals(request: QueryProposalsRequest): Promise; + vote(request: QueryVoteRequest): Promise; + votes(request: QueryVotesRequest): Promise; + params(request: QueryParamsRequest): Promise; + deposit(request: QueryDepositRequest): Promise; + deposits(request: QueryDepositsRequest): Promise; + tallyResult(request: QueryTallyResultRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + proposal(request: QueryProposalRequest): Promise; + proposals(request: QueryProposalsRequest): Promise; + vote(request: QueryVoteRequest): Promise; + votes(request: QueryVotesRequest): Promise; + params(request: QueryParamsRequest): Promise; + deposit(request: QueryDepositRequest): Promise; + deposits(request: QueryDepositsRequest): Promise; + tallyResult(request: QueryTallyResultRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/gov/v1beta1/tx.d.ts new file mode 100644 index 00000000..588bcdb7 --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/tx.d.ts @@ -0,0 +1,142 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { VoteOption, WeightedVoteOption, WeightedVoteOptionSDKType } from "./gov"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ +export interface MsgSubmitProposal { + content?: Any; + initialDeposit: Coin[]; + proposer: string; +} +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ +export interface MsgSubmitProposalSDKType { + content?: AnySDKType; + initial_deposit: CoinSDKType[]; + proposer: string; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponse { + proposalId: Long; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** MsgVote defines a message to cast a vote. */ +export interface MsgVote { + proposalId: Long; + voter: string; + option: VoteOption; +} +/** MsgVote defines a message to cast a vote. */ +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; +} +/** MsgVoteResponse defines the Msg/Vote response type. 
*/ +export interface MsgVoteResponse { +} +/** MsgVoteResponse defines the Msg/Vote response type. */ +export interface MsgVoteResponseSDKType { +} +/** + * MsgVoteWeighted defines a message to cast a vote. + * + * Since: cosmos-sdk 0.43 + */ +export interface MsgVoteWeighted { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; +} +/** + * MsgVoteWeighted defines a message to cast a vote. + * + * Since: cosmos-sdk 0.43 + */ +export interface MsgVoteWeightedSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; +} +/** + * MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. + * + * Since: cosmos-sdk 0.43 + */ +export interface MsgVoteWeightedResponse { +} +/** + * MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. + * + * Since: cosmos-sdk 0.43 + */ +export interface MsgVoteWeightedResponseSDKType { +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ +export interface MsgDeposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ +export interface MsgDepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ +export interface MsgDepositResponse { +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ +export interface MsgDepositResponseSDKType { +} +export declare const MsgSubmitProposal: { + encode(message: MsgSubmitProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal; + fromPartial(object: DeepPartial): MsgSubmitProposal; +}; +export declare const MsgSubmitProposalResponse: { + encode(message: MsgSubmitProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse; + fromPartial(object: DeepPartial): MsgSubmitProposalResponse; +}; +export declare const MsgVote: { + encode(message: MsgVote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote; + fromPartial(object: DeepPartial): MsgVote; +}; +export declare const MsgVoteResponse: { + encode(_: MsgVoteResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse; + fromPartial(_: DeepPartial): MsgVoteResponse; +}; +export declare const MsgVoteWeighted: { + encode(message: MsgVoteWeighted, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeighted; + fromPartial(object: DeepPartial): MsgVoteWeighted; +}; +export declare const MsgVoteWeightedResponse: { + encode(_: MsgVoteWeightedResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeightedResponse; + fromPartial(_: DeepPartial): MsgVoteWeightedResponse; +}; +export declare const MsgDeposit: { + encode(message: MsgDeposit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeposit; + fromPartial(object: DeepPartial): MsgDeposit; +}; +export declare const MsgDepositResponse: { + encode(_: MsgDepositResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositResponse; + fromPartial(_: DeepPartial): MsgDepositResponse; +}; diff --git a/packages/codegen/dist/cosmos/gov/v1beta1/tx.rpc.msg.d.ts 
b/packages/codegen/dist/cosmos/gov/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..0fc403bc --- /dev/null +++ b/packages/codegen/dist/cosmos/gov/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,25 @@ +import { Rpc } from "../../../helpers"; +import { MsgSubmitProposal, MsgSubmitProposalResponse, MsgVote, MsgVoteResponse, MsgVoteWeighted, MsgVoteWeightedResponse, MsgDeposit, MsgDepositResponse } from "./tx"; +/** Msg defines the bank Msg service. */ +export interface Msg { + /** SubmitProposal defines a method to create new proposal given a content. */ + submitProposal(request: MsgSubmitProposal): Promise; + /** Vote defines a method to add a vote on a specific proposal. */ + vote(request: MsgVote): Promise; + /** + * VoteWeighted defines a method to add a weighted vote on a specific proposal. + * + * Since: cosmos-sdk 0.43 + */ + voteWeighted(request: MsgVoteWeighted): Promise; + /** Deposit defines a method to add deposit on a specific proposal. */ + deposit(request: MsgDeposit): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + submitProposal(request: MsgSubmitProposal): Promise; + vote(request: MsgVote): Promise; + voteWeighted(request: MsgVoteWeighted): Promise; + deposit(request: MsgDeposit): Promise; +} diff --git a/packages/codegen/dist/cosmos/group/v1/events.d.ts b/packages/codegen/dist/cosmos/group/v1/events.d.ts new file mode 100644 index 00000000..f22267e6 --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/events.d.ts @@ -0,0 +1,136 @@ +/// +import { ProposalExecutorResult } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** EventCreateGroup is an event emitted when a group is created. */ +export interface EventCreateGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** EventCreateGroup is an event emitted when a group is created. */ +export interface EventCreateGroupSDKType { + group_id: Long; +} +/** EventUpdateGroup is an event emitted when a group is updated. */ +export interface EventUpdateGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** EventUpdateGroup is an event emitted when a group is updated. */ +export interface EventUpdateGroupSDKType { + group_id: Long; +} +/** EventCreateGroupPolicy is an event emitted when a group policy is created. */ +export interface EventCreateGroupPolicy { + /** address is the account address of the group policy. */ + address: string; +} +/** EventCreateGroupPolicy is an event emitted when a group policy is created. */ +export interface EventCreateGroupPolicySDKType { + address: string; +} +/** EventUpdateGroupPolicy is an event emitted when a group policy is updated. */ +export interface EventUpdateGroupPolicy { + /** address is the account address of the group policy. */ + address: string; +} +/** EventUpdateGroupPolicy is an event emitted when a group policy is updated. */ +export interface EventUpdateGroupPolicySDKType { + address: string; +} +/** EventSubmitProposal is an event emitted when a proposal is created. */ +export interface EventSubmitProposal { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventSubmitProposal is an event emitted when a proposal is created. */ +export interface EventSubmitProposalSDKType { + proposal_id: Long; +} +/** EventWithdrawProposal is an event emitted when a proposal is withdrawn. 
*/ +export interface EventWithdrawProposal { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventWithdrawProposal is an event emitted when a proposal is withdrawn. */ +export interface EventWithdrawProposalSDKType { + proposal_id: Long; +} +/** EventVote is an event emitted when a voter votes on a proposal. */ +export interface EventVote { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventVote is an event emitted when a voter votes on a proposal. */ +export interface EventVoteSDKType { + proposal_id: Long; +} +/** EventExec is an event emitted when a proposal is executed. */ +export interface EventExec { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; + /** result is the proposal execution result. */ + result: ProposalExecutorResult; +} +/** EventExec is an event emitted when a proposal is executed. */ +export interface EventExecSDKType { + proposal_id: Long; + result: ProposalExecutorResult; +} +/** EventLeaveGroup is an event emitted when group member leaves the group. */ +export interface EventLeaveGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** address is the account address of the group member. */ + address: string; +} +/** EventLeaveGroup is an event emitted when group member leaves the group. */ +export interface EventLeaveGroupSDKType { + group_id: Long; + address: string; +} +export declare const EventCreateGroup: { + encode(message: EventCreateGroup, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventCreateGroup; + fromPartial(object: DeepPartial): EventCreateGroup; +}; +export declare const EventUpdateGroup: { + encode(message: EventUpdateGroup, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventUpdateGroup; + fromPartial(object: DeepPartial): EventUpdateGroup; +}; +export declare const EventCreateGroupPolicy: { + encode(message: EventCreateGroupPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventCreateGroupPolicy; + fromPartial(object: DeepPartial): EventCreateGroupPolicy; +}; +export declare const EventUpdateGroupPolicy: { + encode(message: EventUpdateGroupPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventUpdateGroupPolicy; + fromPartial(object: DeepPartial): EventUpdateGroupPolicy; +}; +export declare const EventSubmitProposal: { + encode(message: EventSubmitProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventSubmitProposal; + fromPartial(object: DeepPartial): EventSubmitProposal; +}; +export declare const EventWithdrawProposal: { + encode(message: EventWithdrawProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventWithdrawProposal; + fromPartial(object: DeepPartial): EventWithdrawProposal; +}; +export declare const EventVote: { + encode(message: EventVote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventVote; + fromPartial(object: DeepPartial): EventVote; +}; +export declare const EventExec: { + encode(message: EventExec, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventExec; + fromPartial(object: DeepPartial): EventExec; +}; +export declare const EventLeaveGroup: { + encode(message: EventLeaveGroup, writer?: _m0.Writer): _m0.Writer; + 
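// Illustrative sketch (editorial note, not generated output): the MsgClientImpl declared in
// tx.rpc.msg.d.ts above encodes the request and hands it to the supplied Rpc implementation;
// in practice a gov message like this is still wrapped in a signed, broadcast transaction.
// The `rpc` argument and the import paths are assumptions.
import Long from "long";
import { Rpc } from "./helpers";
import { VoteOption } from "./cosmos/gov/v1beta1/gov";
import { MsgClientImpl } from "./cosmos/gov/v1beta1/tx.rpc.msg";

async function voteYes(rpc: Rpc) {
  const msgClient = new MsgClientImpl(rpc);
  return msgClient.vote({
    proposalId: Long.fromNumber(1),
    voter: "cosmos1...", // placeholder bech32 voter address
    option: VoteOption.VOTE_OPTION_YES,
    // note: the v1beta1 MsgVote has no metadata field; the v1 variant above adds one
  });
}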
decode(input: _m0.Reader | Uint8Array, length?: number): EventLeaveGroup; + fromPartial(object: DeepPartial): EventLeaveGroup; +}; diff --git a/packages/codegen/dist/cosmos/group/v1/genesis.d.ts b/packages/codegen/dist/cosmos/group/v1/genesis.d.ts new file mode 100644 index 00000000..0d6d55aa --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/genesis.d.ts @@ -0,0 +1,48 @@ +/// +import { GroupInfo, GroupInfoSDKType, GroupMember, GroupMemberSDKType, GroupPolicyInfo, GroupPolicyInfoSDKType, Proposal, ProposalSDKType, Vote, VoteSDKType } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the group module's genesis state. */ +export interface GenesisState { + /** + * group_seq is the group table orm.Sequence, + * it is used to get the next group ID. + */ + groupSeq: Long; + /** groups is the list of groups info. */ + groups: GroupInfo[]; + /** group_members is the list of groups members. */ + groupMembers: GroupMember[]; + /** + * group_policy_seq is the group policy table orm.Sequence, + * it is used to generate the next group policy account address. + */ + groupPolicySeq: Long; + /** group_policies is the list of group policies info. */ + groupPolicies: GroupPolicyInfo[]; + /** + * proposal_seq is the proposal table orm.Sequence, + * it is used to get the next proposal ID. + */ + proposalSeq: Long; + /** proposals is the list of proposals. */ + proposals: Proposal[]; + /** votes is the list of votes. */ + votes: Vote[]; +} +/** GenesisState defines the group module's genesis state. */ +export interface GenesisStateSDKType { + group_seq: Long; + groups: GroupInfoSDKType[]; + group_members: GroupMemberSDKType[]; + group_policy_seq: Long; + group_policies: GroupPolicyInfoSDKType[]; + proposal_seq: Long; + proposals: ProposalSDKType[]; + votes: VoteSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/group/v1/query.d.ts b/packages/codegen/dist/cosmos/group/v1/query.d.ts new file mode 100644 index 00000000..8f539851 --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/query.d.ts @@ -0,0 +1,420 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { GroupInfo, GroupInfoSDKType, GroupPolicyInfo, GroupPolicyInfoSDKType, GroupMember, GroupMemberSDKType, Proposal, ProposalSDKType, Vote, VoteSDKType, TallyResult, TallyResultSDKType } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryGroupInfoRequest is the Query/GroupInfo request type. */ +export interface QueryGroupInfoRequest { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** QueryGroupInfoRequest is the Query/GroupInfo request type. */ +export interface QueryGroupInfoRequestSDKType { + group_id: Long; +} +/** QueryGroupInfoResponse is the Query/GroupInfo response type. */ +export interface QueryGroupInfoResponse { + /** info is the GroupInfo for the group. */ + info?: GroupInfo; +} +/** QueryGroupInfoResponse is the Query/GroupInfo response type. */ +export interface QueryGroupInfoResponseSDKType { + info?: GroupInfoSDKType; +} +/** QueryGroupPolicyInfoRequest is the Query/GroupPolicyInfo request type. 
*/ +export interface QueryGroupPolicyInfoRequest { + /** address is the account address of the group policy. */ + address: string; +} +/** QueryGroupPolicyInfoRequest is the Query/GroupPolicyInfo request type. */ +export interface QueryGroupPolicyInfoRequestSDKType { + address: string; +} +/** QueryGroupPolicyInfoResponse is the Query/GroupPolicyInfo response type. */ +export interface QueryGroupPolicyInfoResponse { + /** info is the GroupPolicyInfo for the group policy. */ + info?: GroupPolicyInfo; +} +/** QueryGroupPolicyInfoResponse is the Query/GroupPolicyInfo response type. */ +export interface QueryGroupPolicyInfoResponseSDKType { + info?: GroupPolicyInfoSDKType; +} +/** QueryGroupMembersRequest is the Query/GroupMembers request type. */ +export interface QueryGroupMembersRequest { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGroupMembersRequest is the Query/GroupMembers request type. */ +export interface QueryGroupMembersRequestSDKType { + group_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryGroupMembersResponse is the Query/GroupMembersResponse response type. */ +export interface QueryGroupMembersResponse { + /** members are the members of the group with given group_id. */ + members: GroupMember[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryGroupMembersResponse is the Query/GroupMembersResponse response type. */ +export interface QueryGroupMembersResponseSDKType { + members: GroupMemberSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupsByAdminRequest is the Query/GroupsByAdmin request type. */ +export interface QueryGroupsByAdminRequest { + /** admin is the account address of a group's admin. */ + admin: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGroupsByAdminRequest is the Query/GroupsByAdmin request type. */ +export interface QueryGroupsByAdminRequestSDKType { + admin: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupsByAdminResponse is the Query/GroupsByAdminResponse response type. */ +export interface QueryGroupsByAdminResponse { + /** groups are the groups info with the provided admin. */ + groups: GroupInfo[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryGroupsByAdminResponse is the Query/GroupsByAdminResponse response type. */ +export interface QueryGroupsByAdminResponseSDKType { + groups: GroupInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupPoliciesByGroupRequest is the Query/GroupPoliciesByGroup request type. */ +export interface QueryGroupPoliciesByGroupRequest { + /** group_id is the unique ID of the group policy's group. */ + groupId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGroupPoliciesByGroupRequest is the Query/GroupPoliciesByGroup request type. */ +export interface QueryGroupPoliciesByGroupRequestSDKType { + group_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryGroupPoliciesByGroupResponse is the Query/GroupPoliciesByGroup response type. */ +export interface QueryGroupPoliciesByGroupResponse { + /** group_policies are the group policies info associated with the provided group. */ + groupPolicies: GroupPolicyInfo[]; + /** pagination defines the pagination in the response. 
*/ + pagination?: PageResponse; +} +/** QueryGroupPoliciesByGroupResponse is the Query/GroupPoliciesByGroup response type. */ +export interface QueryGroupPoliciesByGroupResponseSDKType { + group_policies: GroupPolicyInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupPoliciesByAdminRequest is the Query/GroupPoliciesByAdmin request type. */ +export interface QueryGroupPoliciesByAdminRequest { + /** admin is the admin address of the group policy. */ + admin: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGroupPoliciesByAdminRequest is the Query/GroupPoliciesByAdmin request type. */ +export interface QueryGroupPoliciesByAdminRequestSDKType { + admin: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupPoliciesByAdminResponse is the Query/GroupPoliciesByAdmin response type. */ +export interface QueryGroupPoliciesByAdminResponse { + /** group_policies are the group policies info with provided admin. */ + groupPolicies: GroupPolicyInfo[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryGroupPoliciesByAdminResponse is the Query/GroupPoliciesByAdmin response type. */ +export interface QueryGroupPoliciesByAdminResponseSDKType { + group_policies: GroupPolicyInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryProposalRequest is the Query/Proposal request type. */ +export interface QueryProposalRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the Query/Proposal request type. */ +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the Query/Proposal response type. */ +export interface QueryProposalResponse { + /** proposal is the proposal info. */ + proposal?: Proposal; +} +/** QueryProposalResponse is the Query/Proposal response type. */ +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsByGroupPolicyRequest is the Query/ProposalByGroupPolicy request type. */ +export interface QueryProposalsByGroupPolicyRequest { + /** address is the account address of the group policy related to proposals. */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryProposalsByGroupPolicyRequest is the Query/ProposalByGroupPolicy request type. */ +export interface QueryProposalsByGroupPolicyRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** QueryProposalsByGroupPolicyResponse is the Query/ProposalByGroupPolicy response type. */ +export interface QueryProposalsByGroupPolicyResponse { + /** proposals are the proposals with given group policy. */ + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryProposalsByGroupPolicyResponse is the Query/ProposalByGroupPolicy response type. */ +export interface QueryProposalsByGroupPolicyResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteByProposalVoterRequest is the Query/VoteByProposalVoter request type. */ +export interface QueryVoteByProposalVoterRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; + /** voter is a proposal voter account address. */ + voter: string; +} +/** QueryVoteByProposalVoterRequest is the Query/VoteByProposalVoter request type. 
*/ +export interface QueryVoteByProposalVoterRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteByProposalVoterResponse is the Query/VoteByProposalVoter response type. */ +export interface QueryVoteByProposalVoterResponse { + /** vote is the vote with given proposal_id and voter. */ + vote?: Vote; +} +/** QueryVoteByProposalVoterResponse is the Query/VoteByProposalVoter response type. */ +export interface QueryVoteByProposalVoterResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesByProposalRequest is the Query/VotesByProposal request type. */ +export interface QueryVotesByProposalRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryVotesByProposalRequest is the Query/VotesByProposal request type. */ +export interface QueryVotesByProposalRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesByProposalResponse is the Query/VotesByProposal response type. */ +export interface QueryVotesByProposalResponse { + /** votes are the list of votes for given proposal_id. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryVotesByProposalResponse is the Query/VotesByProposal response type. */ +export interface QueryVotesByProposalResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVotesByVoterRequest is the Query/VotesByVoter request type. */ +export interface QueryVotesByVoterRequest { + /** voter is a proposal voter account address. */ + voter: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryVotesByVoterRequest is the Query/VotesByVoter request type. */ +export interface QueryVotesByVoterRequestSDKType { + voter: string; + pagination?: PageRequestSDKType; +} +/** QueryVotesByVoterResponse is the Query/VotesByVoter response type. */ +export interface QueryVotesByVoterResponse { + /** votes are the list of votes by given voter. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryVotesByVoterResponse is the Query/VotesByVoter response type. */ +export interface QueryVotesByVoterResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupsByMemberRequest is the Query/GroupsByMember request type. */ +export interface QueryGroupsByMemberRequest { + /** address is the group member address. */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryGroupsByMemberRequest is the Query/GroupsByMember request type. */ +export interface QueryGroupsByMemberRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupsByMemberResponse is the Query/GroupsByMember response type. */ +export interface QueryGroupsByMemberResponse { + /** groups are the groups info with the provided group member. */ + groups: GroupInfo[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryGroupsByMemberResponse is the Query/GroupsByMember response type. */ +export interface QueryGroupsByMemberResponseSDKType { + groups: GroupInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the Query/TallyResult request type. 
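+ *
+ * A minimal usage sketch for this request, assuming a reachable Tendermint RPC endpoint
+ * (the URL and proposal id are placeholders, and the calls run inside an async context):
+ *
+ *   import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
+ *   import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate";
+ *   import { QueryClientImpl } from "./query.rpc.Query";
+ *   import Long from "long";
+ *
+ *   const tmClient = await Tendermint34Client.connect("http://localhost:26657");
+ *   const rpc = createProtobufRpcClient(new QueryClient(tmClient));
+ *   const queryService = new QueryClientImpl(rpc);
+ *   const { tally } = await queryService.tallyResult({ proposalId: Long.fromNumber(1) });
+ *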
*/ +export interface QueryTallyResultRequest { + /** proposal_id is the unique id of a proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the Query/TallyResult request type. */ +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the Query/TallyResult response type. */ +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the Query/TallyResult response type. */ +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} +export declare const QueryGroupInfoRequest: { + encode(message: QueryGroupInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupInfoRequest; + fromPartial(object: DeepPartial): QueryGroupInfoRequest; +}; +export declare const QueryGroupInfoResponse: { + encode(message: QueryGroupInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupInfoResponse; + fromPartial(object: DeepPartial): QueryGroupInfoResponse; +}; +export declare const QueryGroupPolicyInfoRequest: { + encode(message: QueryGroupPolicyInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPolicyInfoRequest; + fromPartial(object: DeepPartial): QueryGroupPolicyInfoRequest; +}; +export declare const QueryGroupPolicyInfoResponse: { + encode(message: QueryGroupPolicyInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPolicyInfoResponse; + fromPartial(object: DeepPartial): QueryGroupPolicyInfoResponse; +}; +export declare const QueryGroupMembersRequest: { + encode(message: QueryGroupMembersRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupMembersRequest; + fromPartial(object: DeepPartial): QueryGroupMembersRequest; +}; +export declare const QueryGroupMembersResponse: { + encode(message: QueryGroupMembersResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupMembersResponse; + fromPartial(object: DeepPartial): QueryGroupMembersResponse; +}; +export declare const QueryGroupsByAdminRequest: { + encode(message: QueryGroupsByAdminRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByAdminRequest; + fromPartial(object: DeepPartial): QueryGroupsByAdminRequest; +}; +export declare const QueryGroupsByAdminResponse: { + encode(message: QueryGroupsByAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByAdminResponse; + fromPartial(object: DeepPartial): QueryGroupsByAdminResponse; +}; +export declare const QueryGroupPoliciesByGroupRequest: { + encode(message: QueryGroupPoliciesByGroupRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByGroupRequest; + fromPartial(object: DeepPartial): QueryGroupPoliciesByGroupRequest; +}; +export declare const QueryGroupPoliciesByGroupResponse: { + encode(message: QueryGroupPoliciesByGroupResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByGroupResponse; + fromPartial(object: DeepPartial): QueryGroupPoliciesByGroupResponse; +}; +export declare const QueryGroupPoliciesByAdminRequest: { 
+ encode(message: QueryGroupPoliciesByAdminRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByAdminRequest; + fromPartial(object: DeepPartial): QueryGroupPoliciesByAdminRequest; +}; +export declare const QueryGroupPoliciesByAdminResponse: { + encode(message: QueryGroupPoliciesByAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByAdminResponse; + fromPartial(object: DeepPartial): QueryGroupPoliciesByAdminResponse; +}; +export declare const QueryProposalRequest: { + encode(message: QueryProposalRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest; + fromPartial(object: DeepPartial): QueryProposalRequest; +}; +export declare const QueryProposalResponse: { + encode(message: QueryProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse; + fromPartial(object: DeepPartial): QueryProposalResponse; +}; +export declare const QueryProposalsByGroupPolicyRequest: { + encode(message: QueryProposalsByGroupPolicyRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsByGroupPolicyRequest; + fromPartial(object: DeepPartial): QueryProposalsByGroupPolicyRequest; +}; +export declare const QueryProposalsByGroupPolicyResponse: { + encode(message: QueryProposalsByGroupPolicyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsByGroupPolicyResponse; + fromPartial(object: DeepPartial): QueryProposalsByGroupPolicyResponse; +}; +export declare const QueryVoteByProposalVoterRequest: { + encode(message: QueryVoteByProposalVoterRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteByProposalVoterRequest; + fromPartial(object: DeepPartial): QueryVoteByProposalVoterRequest; +}; +export declare const QueryVoteByProposalVoterResponse: { + encode(message: QueryVoteByProposalVoterResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteByProposalVoterResponse; + fromPartial(object: DeepPartial): QueryVoteByProposalVoterResponse; +}; +export declare const QueryVotesByProposalRequest: { + encode(message: QueryVotesByProposalRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByProposalRequest; + fromPartial(object: DeepPartial): QueryVotesByProposalRequest; +}; +export declare const QueryVotesByProposalResponse: { + encode(message: QueryVotesByProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByProposalResponse; + fromPartial(object: DeepPartial): QueryVotesByProposalResponse; +}; +export declare const QueryVotesByVoterRequest: { + encode(message: QueryVotesByVoterRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByVoterRequest; + fromPartial(object: DeepPartial): QueryVotesByVoterRequest; +}; +export declare const QueryVotesByVoterResponse: { + encode(message: QueryVotesByVoterResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByVoterResponse; + fromPartial(object: DeepPartial): QueryVotesByVoterResponse; +}; +export declare const 
QueryGroupsByMemberRequest: { + encode(message: QueryGroupsByMemberRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByMemberRequest; + fromPartial(object: DeepPartial<QueryGroupsByMemberRequest>): QueryGroupsByMemberRequest; +}; +export declare const QueryGroupsByMemberResponse: { + encode(message: QueryGroupsByMemberResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByMemberResponse; + fromPartial(object: DeepPartial<QueryGroupsByMemberResponse>): QueryGroupsByMemberResponse; +}; +export declare const QueryTallyResultRequest: { + encode(message: QueryTallyResultRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest; + fromPartial(object: DeepPartial<QueryTallyResultRequest>): QueryTallyResultRequest; +}; +export declare const QueryTallyResultResponse: { + encode(message: QueryTallyResultResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse; + fromPartial(object: DeepPartial<QueryTallyResultResponse>): QueryTallyResultResponse; +}; diff --git a/packages/codegen/dist/cosmos/group/v1/query.lcd.d.ts b/packages/codegen/dist/cosmos/group/v1/query.lcd.d.ts new file mode 100644 index 00000000..eb9b1ffe --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/query.lcd.d.ts @@ -0,0 +1,21 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryGroupInfoRequest, QueryGroupInfoResponseSDKType, QueryGroupPolicyInfoRequest, QueryGroupPolicyInfoResponseSDKType, QueryGroupMembersRequest, QueryGroupMembersResponseSDKType, QueryGroupsByAdminRequest, QueryGroupsByAdminResponseSDKType, QueryGroupPoliciesByGroupRequest, QueryGroupPoliciesByGroupResponseSDKType, QueryGroupPoliciesByAdminRequest, QueryGroupPoliciesByAdminResponseSDKType, QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsByGroupPolicyRequest, QueryProposalsByGroupPolicyResponseSDKType, QueryVoteByProposalVoterRequest, QueryVoteByProposalVoterResponseSDKType, QueryVotesByProposalRequest, QueryVotesByProposalResponseSDKType, QueryVotesByVoterRequest, QueryVotesByVoterResponseSDKType, QueryGroupsByMemberRequest, QueryGroupsByMemberResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + groupInfo(params: QueryGroupInfoRequest): Promise<QueryGroupInfoResponseSDKType>; + groupPolicyInfo(params: QueryGroupPolicyInfoRequest): Promise<QueryGroupPolicyInfoResponseSDKType>; + groupMembers(params: QueryGroupMembersRequest): Promise<QueryGroupMembersResponseSDKType>; + groupsByAdmin(params: QueryGroupsByAdminRequest): Promise<QueryGroupsByAdminResponseSDKType>; + groupPoliciesByGroup(params: QueryGroupPoliciesByGroupRequest): Promise<QueryGroupPoliciesByGroupResponseSDKType>; + groupPoliciesByAdmin(params: QueryGroupPoliciesByAdminRequest): Promise<QueryGroupPoliciesByAdminResponseSDKType>; + proposal(params: QueryProposalRequest): Promise<QueryProposalResponseSDKType>; + proposalsByGroupPolicy(params: QueryProposalsByGroupPolicyRequest): Promise<QueryProposalsByGroupPolicyResponseSDKType>; + voteByProposalVoter(params: QueryVoteByProposalVoterRequest): Promise<QueryVoteByProposalVoterResponseSDKType>; + votesByProposal(params: QueryVotesByProposalRequest): Promise<QueryVotesByProposalResponseSDKType>; + votesByVoter(params: QueryVotesByVoterRequest): Promise<QueryVotesByVoterResponseSDKType>; + groupsByMember(params: QueryGroupsByMemberRequest): Promise<QueryGroupsByMemberResponseSDKType>; + tallyResult(params: QueryTallyResultRequest): Promise<QueryTallyResultResponseSDKType>; +} diff --git a/packages/codegen/dist/cosmos/group/v1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/group/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..2be3770a --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/query.rpc.Query.d.ts @@ -0,0 +1,64 @@ +import { Rpc } from 
"../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryGroupInfoRequest, QueryGroupInfoResponse, QueryGroupPolicyInfoRequest, QueryGroupPolicyInfoResponse, QueryGroupMembersRequest, QueryGroupMembersResponse, QueryGroupsByAdminRequest, QueryGroupsByAdminResponse, QueryGroupPoliciesByGroupRequest, QueryGroupPoliciesByGroupResponse, QueryGroupPoliciesByAdminRequest, QueryGroupPoliciesByAdminResponse, QueryProposalRequest, QueryProposalResponse, QueryProposalsByGroupPolicyRequest, QueryProposalsByGroupPolicyResponse, QueryVoteByProposalVoterRequest, QueryVoteByProposalVoterResponse, QueryVotesByProposalRequest, QueryVotesByProposalResponse, QueryVotesByVoterRequest, QueryVotesByVoterResponse, QueryGroupsByMemberRequest, QueryGroupsByMemberResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query is the cosmos.group.v1 Query service. */ +export interface Query { + /** GroupInfo queries group info based on group id. */ + groupInfo(request: QueryGroupInfoRequest): Promise<QueryGroupInfoResponse>; + /** GroupPolicyInfo queries group policy info based on account address of group policy. */ + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise<QueryGroupPolicyInfoResponse>; + /** GroupMembers queries members of a group */ + groupMembers(request: QueryGroupMembersRequest): Promise<QueryGroupMembersResponse>; + /** GroupsByAdmin queries groups by admin address. */ + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise<QueryGroupsByAdminResponse>; + /** GroupPoliciesByGroup queries group policies by group id. */ + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise<QueryGroupPoliciesByGroupResponse>; + /** GroupPoliciesByAdmin queries group policies by admin address. */ + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): Promise<QueryGroupPoliciesByAdminResponse>; + /** Proposal queries a proposal based on proposal id. */ + proposal(request: QueryProposalRequest): Promise<QueryProposalResponse>; + /** ProposalsByGroupPolicy queries proposals based on account address of group policy. */ + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise<QueryProposalsByGroupPolicyResponse>; + /** VoteByProposalVoter queries a vote by proposal id and voter. */ + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise<QueryVoteByProposalVoterResponse>; + /** VotesByProposal queries a vote by proposal. */ + votesByProposal(request: QueryVotesByProposalRequest): Promise<QueryVotesByProposalResponse>; + /** VotesByVoter queries a vote by voter. */ + votesByVoter(request: QueryVotesByVoterRequest): Promise<QueryVotesByVoterResponse>; + /** GroupsByMember queries groups by member address. */ + groupsByMember(request: QueryGroupsByMemberRequest): Promise<QueryGroupsByMemberResponse>; + /** TallyResult queries the tally of a proposal votes. 
*/ + tallyResult(request: QueryTallyResultRequest): Promise<QueryTallyResultResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + groupInfo(request: QueryGroupInfoRequest): Promise<QueryGroupInfoResponse>; + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise<QueryGroupPolicyInfoResponse>; + groupMembers(request: QueryGroupMembersRequest): Promise<QueryGroupMembersResponse>; + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise<QueryGroupsByAdminResponse>; + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise<QueryGroupPoliciesByGroupResponse>; + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): Promise<QueryGroupPoliciesByAdminResponse>; + proposal(request: QueryProposalRequest): Promise<QueryProposalResponse>; + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise<QueryProposalsByGroupPolicyResponse>; + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise<QueryVoteByProposalVoterResponse>; + votesByProposal(request: QueryVotesByProposalRequest): Promise<QueryVotesByProposalResponse>; + votesByVoter(request: QueryVotesByVoterRequest): Promise<QueryVotesByVoterResponse>; + groupsByMember(request: QueryGroupsByMemberRequest): Promise<QueryGroupsByMemberResponse>; + tallyResult(request: QueryTallyResultRequest): Promise<QueryTallyResultResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + groupInfo(request: QueryGroupInfoRequest): Promise<QueryGroupInfoResponse>; + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise<QueryGroupPolicyInfoResponse>; + groupMembers(request: QueryGroupMembersRequest): Promise<QueryGroupMembersResponse>; + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise<QueryGroupsByAdminResponse>; + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise<QueryGroupPoliciesByGroupResponse>; + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): Promise<QueryGroupPoliciesByAdminResponse>; + proposal(request: QueryProposalRequest): Promise<QueryProposalResponse>; + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise<QueryProposalsByGroupPolicyResponse>; + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise<QueryVoteByProposalVoterResponse>; + votesByProposal(request: QueryVotesByProposalRequest): Promise<QueryVotesByProposalResponse>; + votesByVoter(request: QueryVotesByVoterRequest): Promise<QueryVotesByVoterResponse>; + groupsByMember(request: QueryGroupsByMemberRequest): Promise<QueryGroupsByMemberResponse>; + tallyResult(request: QueryTallyResultRequest): Promise<QueryTallyResultResponse>; +}; diff --git a/packages/codegen/dist/cosmos/group/v1/tx.d.ts b/packages/codegen/dist/cosmos/group/v1/tx.d.ts new file mode 100644 index 00000000..325a8757 --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/tx.d.ts @@ -0,0 +1,501 @@ +/// <reference types="long" /> +import { Member, MemberSDKType, VoteOption } from "./types"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Exec defines modes of execution of a proposal on creation or on new vote. */ +export declare enum Exec { + /** + * EXEC_UNSPECIFIED - An empty value means that there should be a separate + * MsgExec request for the proposal to execute. + */ + EXEC_UNSPECIFIED = 0, + /** + * EXEC_TRY - Try to execute the proposal immediately. + * If the proposal is not allowed per the DecisionPolicy, + * the proposal will still be open and could + * be executed at a later point. + */ + EXEC_TRY = 1, + UNRECOGNIZED = -1 +} +export declare const ExecSDKType: typeof Exec; +export declare function execFromJSON(object: any): Exec; +export declare function execToJSON(object: Exec): string; +/** MsgCreateGroup is the Msg/CreateGroup request type. */ +export interface MsgCreateGroup { + /** admin is the account address of the group admin. */ + admin: string; + /** members defines the group members. */ + members: Member[]; + /** metadata is any arbitrary metadata to attached to the group. */ + metadata: string; +} +/** MsgCreateGroup is the Msg/CreateGroup request type. 
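+ *
+ * This SDKType variant differs from the camelCase MsgCreateGroup above only in field
+ * casing. A hypothetical literal for the camelCase form (addresses are placeholders):
+ *
+ *   const msg: MsgCreateGroup = {
+ *     admin: "cosmos1admin...",
+ *     members: [{ address: "cosmos1member...", weight: "1", metadata: "" }],
+ *     metadata: ""
+ *   };
+ *
+ * MsgCreateGroup.encode(msg).finish() then produces the protobuf bytes for this message.
+ *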
*/ +export interface MsgCreateGroupSDKType { + admin: string; + members: MemberSDKType[]; + metadata: string; +} +/** MsgCreateGroupResponse is the Msg/CreateGroup response type. */ +export interface MsgCreateGroupResponse { + /** group_id is the unique ID of the newly created group. */ + groupId: Long; +} +/** MsgCreateGroupResponse is the Msg/CreateGroup response type. */ +export interface MsgCreateGroupResponseSDKType { + group_id: Long; +} +/** MsgUpdateGroupMembers is the Msg/UpdateGroupMembers request type. */ +export interface MsgUpdateGroupMembers { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + groupId: Long; + /** + * member_updates is the list of members to update, + * set weight to 0 to remove a member. + */ + memberUpdates: Member[]; +} +/** MsgUpdateGroupMembers is the Msg/UpdateGroupMembers request type. */ +export interface MsgUpdateGroupMembersSDKType { + admin: string; + group_id: Long; + member_updates: MemberSDKType[]; +} +/** MsgUpdateGroupMembersResponse is the Msg/UpdateGroupMembers response type. */ +export interface MsgUpdateGroupMembersResponse { +} +/** MsgUpdateGroupMembersResponse is the Msg/UpdateGroupMembers response type. */ +export interface MsgUpdateGroupMembersResponseSDKType { +} +/** MsgUpdateGroupAdmin is the Msg/UpdateGroupAdmin request type. */ +export interface MsgUpdateGroupAdmin { + /** admin is the current account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + groupId: Long; + /** new_admin is the group new admin account address. */ + newAdmin: string; +} +/** MsgUpdateGroupAdmin is the Msg/UpdateGroupAdmin request type. */ +export interface MsgUpdateGroupAdminSDKType { + admin: string; + group_id: Long; + new_admin: string; +} +/** MsgUpdateGroupAdminResponse is the Msg/UpdateGroupAdmin response type. */ +export interface MsgUpdateGroupAdminResponse { +} +/** MsgUpdateGroupAdminResponse is the Msg/UpdateGroupAdmin response type. */ +export interface MsgUpdateGroupAdminResponseSDKType { +} +/** MsgUpdateGroupMetadata is the Msg/UpdateGroupMetadata request type. */ +export interface MsgUpdateGroupMetadata { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + groupId: Long; + /** metadata is the updated group's metadata. */ + metadata: string; +} +/** MsgUpdateGroupMetadata is the Msg/UpdateGroupMetadata request type. */ +export interface MsgUpdateGroupMetadataSDKType { + admin: string; + group_id: Long; + metadata: string; +} +/** MsgUpdateGroupMetadataResponse is the Msg/UpdateGroupMetadata response type. */ +export interface MsgUpdateGroupMetadataResponse { +} +/** MsgUpdateGroupMetadataResponse is the Msg/UpdateGroupMetadata response type. */ +export interface MsgUpdateGroupMetadataResponseSDKType { +} +/** MsgCreateGroupPolicy is the Msg/CreateGroupPolicy request type. */ +export interface MsgCreateGroupPolicy { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + groupId: Long; + /** metadata is any arbitrary metadata attached to the group policy. */ + metadata: string; + /** decision_policy specifies the group policy's decision policy. */ + decisionPolicy?: Any; +} +/** MsgCreateGroupPolicy is the Msg/CreateGroupPolicy request type. 
*/ +export interface MsgCreateGroupPolicySDKType { + admin: string; + group_id: Long; + metadata: string; + decision_policy?: AnySDKType; +} +/** MsgCreateGroupPolicyResponse is the Msg/CreateGroupPolicy response type. */ +export interface MsgCreateGroupPolicyResponse { + /** address is the account address of the newly created group policy. */ + address: string; +} +/** MsgCreateGroupPolicyResponse is the Msg/CreateGroupPolicy response type. */ +export interface MsgCreateGroupPolicyResponseSDKType { + address: string; +} +/** MsgUpdateGroupPolicyAdmin is the Msg/UpdateGroupPolicyAdmin request type. */ +export interface MsgUpdateGroupPolicyAdmin { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of the group policy. */ + address: string; + /** new_admin is the new group policy admin. */ + newAdmin: string; +} +/** MsgUpdateGroupPolicyAdmin is the Msg/UpdateGroupPolicyAdmin request type. */ +export interface MsgUpdateGroupPolicyAdminSDKType { + admin: string; + address: string; + new_admin: string; +} +/** MsgCreateGroupWithPolicy is the Msg/CreateGroupWithPolicy request type. */ +export interface MsgCreateGroupWithPolicy { + /** admin is the account address of the group and group policy admin. */ + admin: string; + /** members defines the group members. */ + members: Member[]; + /** group_metadata is any arbitrary metadata attached to the group. */ + groupMetadata: string; + /** group_policy_metadata is any arbitrary metadata attached to the group policy. */ + groupPolicyMetadata: string; + /** group_policy_as_admin is a boolean field, if set to true, the group policy account address will be used as group and group policy admin. */ + groupPolicyAsAdmin: boolean; + /** decision_policy specifies the group policy's decision policy. */ + decisionPolicy?: Any; +} +/** MsgCreateGroupWithPolicy is the Msg/CreateGroupWithPolicy request type. */ +export interface MsgCreateGroupWithPolicySDKType { + admin: string; + members: MemberSDKType[]; + group_metadata: string; + group_policy_metadata: string; + group_policy_as_admin: boolean; + decision_policy?: AnySDKType; +} +/** MsgCreateGroupWithPolicyResponse is the Msg/CreateGroupWithPolicy response type. */ +export interface MsgCreateGroupWithPolicyResponse { + /** group_id is the unique ID of the newly created group with policy. */ + groupId: Long; + /** group_policy_address is the account address of the newly created group policy. */ + groupPolicyAddress: string; +} +/** MsgCreateGroupWithPolicyResponse is the Msg/CreateGroupWithPolicy response type. */ +export interface MsgCreateGroupWithPolicyResponseSDKType { + group_id: Long; + group_policy_address: string; +} +/** MsgUpdateGroupPolicyAdminResponse is the Msg/UpdateGroupPolicyAdmin response type. */ +export interface MsgUpdateGroupPolicyAdminResponse { +} +/** MsgUpdateGroupPolicyAdminResponse is the Msg/UpdateGroupPolicyAdmin response type. */ +export interface MsgUpdateGroupPolicyAdminResponseSDKType { +} +/** MsgUpdateGroupPolicyDecisionPolicy is the Msg/UpdateGroupPolicyDecisionPolicy request type. */ +export interface MsgUpdateGroupPolicyDecisionPolicy { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of group policy. */ + address: string; + /** decision_policy is the updated group policy's decision policy. */ + decisionPolicy?: Any; +} +/** MsgUpdateGroupPolicyDecisionPolicy is the Msg/UpdateGroupPolicyDecisionPolicy request type. 
*/ +export interface MsgUpdateGroupPolicyDecisionPolicySDKType { + admin: string; + address: string; + decision_policy?: AnySDKType; +} +/** MsgUpdateGroupPolicyDecisionPolicyResponse is the Msg/UpdateGroupPolicyDecisionPolicy response type. */ +export interface MsgUpdateGroupPolicyDecisionPolicyResponse { +} +/** MsgUpdateGroupPolicyDecisionPolicyResponse is the Msg/UpdateGroupPolicyDecisionPolicy response type. */ +export interface MsgUpdateGroupPolicyDecisionPolicyResponseSDKType { +} +/** MsgUpdateGroupPolicyMetadata is the Msg/UpdateGroupPolicyMetadata request type. */ +export interface MsgUpdateGroupPolicyMetadata { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of group policy. */ + address: string; + /** metadata is the updated group policy metadata. */ + metadata: string; +} +/** MsgUpdateGroupPolicyMetadata is the Msg/UpdateGroupPolicyMetadata request type. */ +export interface MsgUpdateGroupPolicyMetadataSDKType { + admin: string; + address: string; + metadata: string; +} +/** MsgUpdateGroupPolicyMetadataResponse is the Msg/UpdateGroupPolicyMetadata response type. */ +export interface MsgUpdateGroupPolicyMetadataResponse { +} +/** MsgUpdateGroupPolicyMetadataResponse is the Msg/UpdateGroupPolicyMetadata response type. */ +export interface MsgUpdateGroupPolicyMetadataResponseSDKType { +} +/** MsgSubmitProposal is the Msg/SubmitProposal request type. */ +export interface MsgSubmitProposal { + /** address is the account address of group policy. */ + address: string; + /** + * proposers are the account addresses of the proposers. + * Proposers signatures will be counted as yes votes. + */ + proposers: string[]; + /** metadata is any arbitrary metadata to attached to the proposal. */ + metadata: string; + /** messages is a list of `sdk.Msg`s that will be executed if the proposal passes. */ + messages: Any[]; + /** + * exec defines the mode of execution of the proposal, + * whether it should be executed immediately on creation or not. + * If so, proposers signatures are considered as Yes votes. + */ + exec: Exec; +} +/** MsgSubmitProposal is the Msg/SubmitProposal request type. */ +export interface MsgSubmitProposalSDKType { + address: string; + proposers: string[]; + metadata: string; + messages: AnySDKType[]; + exec: Exec; +} +/** MsgSubmitProposalResponse is the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponse { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; +} +/** MsgSubmitProposalResponse is the Msg/SubmitProposal response type. */ +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** MsgWithdrawProposal is the Msg/WithdrawProposal request type. */ +export interface MsgWithdrawProposal { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** address is the admin of the group policy or one of the proposer of the proposal. */ + address: string; +} +/** MsgWithdrawProposal is the Msg/WithdrawProposal request type. */ +export interface MsgWithdrawProposalSDKType { + proposal_id: Long; + address: string; +} +/** MsgWithdrawProposalResponse is the Msg/WithdrawProposal response type. */ +export interface MsgWithdrawProposalResponse { +} +/** MsgWithdrawProposalResponse is the Msg/WithdrawProposal response type. */ +export interface MsgWithdrawProposalResponseSDKType { +} +/** MsgVote is the Msg/Vote request type. */ +export interface MsgVote { + /** proposal is the unique ID of the proposal. 
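+ *
+ * For context, a complete MsgVote value might look like the following sketch (the
+ * proposal id and voter address are placeholders):
+ *
+ *   const vote: MsgVote = {
+ *     proposalId: Long.fromNumber(12),
+ *     voter: "cosmos1voter...",
+ *     option: VoteOption.VOTE_OPTION_YES,
+ *     metadata: "",
+ *     exec: Exec.EXEC_TRY
+ *   };
+ *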
*/ + proposalId: Long; + /** voter is the voter account address. */ + voter: string; + /** option is the voter's choice on the proposal. */ + option: VoteOption; + /** metadata is any arbitrary metadata to attached to the vote. */ + metadata: string; + /** + * exec defines whether the proposal should be executed + * immediately after voting or not. + */ + exec: Exec; +} +/** MsgVote is the Msg/Vote request type. */ +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; + exec: Exec; +} +/** MsgVoteResponse is the Msg/Vote response type. */ +export interface MsgVoteResponse { +} +/** MsgVoteResponse is the Msg/Vote response type. */ +export interface MsgVoteResponseSDKType { +} +/** MsgExec is the Msg/Exec request type. */ +export interface MsgExec { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** signer is the account address used to execute the proposal. */ + signer: string; +} +/** MsgExec is the Msg/Exec request type. */ +export interface MsgExecSDKType { + proposal_id: Long; + signer: string; +} +/** MsgExecResponse is the Msg/Exec request type. */ +export interface MsgExecResponse { +} +/** MsgExecResponse is the Msg/Exec request type. */ +export interface MsgExecResponseSDKType { +} +/** MsgLeaveGroup is the Msg/LeaveGroup request type. */ +export interface MsgLeaveGroup { + /** address is the account address of the group member. */ + address: string; + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** MsgLeaveGroup is the Msg/LeaveGroup request type. */ +export interface MsgLeaveGroupSDKType { + address: string; + group_id: Long; +} +/** MsgLeaveGroupResponse is the Msg/LeaveGroup response type. */ +export interface MsgLeaveGroupResponse { +} +/** MsgLeaveGroupResponse is the Msg/LeaveGroup response type. 
*/ +export interface MsgLeaveGroupResponseSDKType { +} +export declare const MsgCreateGroup: { + encode(message: MsgCreateGroup, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroup; + fromPartial(object: DeepPartial): MsgCreateGroup; +}; +export declare const MsgCreateGroupResponse: { + encode(message: MsgCreateGroupResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupResponse; + fromPartial(object: DeepPartial): MsgCreateGroupResponse; +}; +export declare const MsgUpdateGroupMembers: { + encode(message: MsgUpdateGroupMembers, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMembers; + fromPartial(object: DeepPartial): MsgUpdateGroupMembers; +}; +export declare const MsgUpdateGroupMembersResponse: { + encode(_: MsgUpdateGroupMembersResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMembersResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupMembersResponse; +}; +export declare const MsgUpdateGroupAdmin: { + encode(message: MsgUpdateGroupAdmin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupAdmin; + fromPartial(object: DeepPartial): MsgUpdateGroupAdmin; +}; +export declare const MsgUpdateGroupAdminResponse: { + encode(_: MsgUpdateGroupAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupAdminResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupAdminResponse; +}; +export declare const MsgUpdateGroupMetadata: { + encode(message: MsgUpdateGroupMetadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMetadata; + fromPartial(object: DeepPartial): MsgUpdateGroupMetadata; +}; +export declare const MsgUpdateGroupMetadataResponse: { + encode(_: MsgUpdateGroupMetadataResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMetadataResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupMetadataResponse; +}; +export declare const MsgCreateGroupPolicy: { + encode(message: MsgCreateGroupPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupPolicy; + fromPartial(object: DeepPartial): MsgCreateGroupPolicy; +}; +export declare const MsgCreateGroupPolicyResponse: { + encode(message: MsgCreateGroupPolicyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupPolicyResponse; + fromPartial(object: DeepPartial): MsgCreateGroupPolicyResponse; +}; +export declare const MsgUpdateGroupPolicyAdmin: { + encode(message: MsgUpdateGroupPolicyAdmin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyAdmin; + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyAdmin; +}; +export declare const MsgCreateGroupWithPolicy: { + encode(message: MsgCreateGroupWithPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupWithPolicy; + fromPartial(object: DeepPartial): MsgCreateGroupWithPolicy; +}; +export declare const MsgCreateGroupWithPolicyResponse: { + encode(message: MsgCreateGroupWithPolicyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): 
MsgCreateGroupWithPolicyResponse; + fromPartial(object: DeepPartial): MsgCreateGroupWithPolicyResponse; +}; +export declare const MsgUpdateGroupPolicyAdminResponse: { + encode(_: MsgUpdateGroupPolicyAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyAdminResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyAdminResponse; +}; +export declare const MsgUpdateGroupPolicyDecisionPolicy: { + encode(message: MsgUpdateGroupPolicyDecisionPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyDecisionPolicy; + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyDecisionPolicy; +}; +export declare const MsgUpdateGroupPolicyDecisionPolicyResponse: { + encode(_: MsgUpdateGroupPolicyDecisionPolicyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyDecisionPolicyResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyDecisionPolicyResponse; +}; +export declare const MsgUpdateGroupPolicyMetadata: { + encode(message: MsgUpdateGroupPolicyMetadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyMetadata; + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyMetadata; +}; +export declare const MsgUpdateGroupPolicyMetadataResponse: { + encode(_: MsgUpdateGroupPolicyMetadataResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyMetadataResponse; + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyMetadataResponse; +}; +export declare const MsgSubmitProposal: { + encode(message: MsgSubmitProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal; + fromPartial(object: DeepPartial): MsgSubmitProposal; +}; +export declare const MsgSubmitProposalResponse: { + encode(message: MsgSubmitProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse; + fromPartial(object: DeepPartial): MsgSubmitProposalResponse; +}; +export declare const MsgWithdrawProposal: { + encode(message: MsgWithdrawProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawProposal; + fromPartial(object: DeepPartial): MsgWithdrawProposal; +}; +export declare const MsgWithdrawProposalResponse: { + encode(_: MsgWithdrawProposalResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawProposalResponse; + fromPartial(_: DeepPartial): MsgWithdrawProposalResponse; +}; +export declare const MsgVote: { + encode(message: MsgVote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote; + fromPartial(object: DeepPartial): MsgVote; +}; +export declare const MsgVoteResponse: { + encode(_: MsgVoteResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse; + fromPartial(_: DeepPartial): MsgVoteResponse; +}; +export declare const MsgExec: { + encode(message: MsgExec, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExec; + fromPartial(object: DeepPartial): MsgExec; +}; +export declare const MsgExecResponse: { + encode(_: MsgExecResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, 
length?: number): MsgExecResponse; + fromPartial(_: DeepPartial): MsgExecResponse; +}; +export declare const MsgLeaveGroup: { + encode(message: MsgLeaveGroup, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgLeaveGroup; + fromPartial(object: DeepPartial): MsgLeaveGroup; +}; +export declare const MsgLeaveGroupResponse: { + encode(_: MsgLeaveGroupResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgLeaveGroupResponse; + fromPartial(_: DeepPartial): MsgLeaveGroupResponse; +}; diff --git a/packages/codegen/dist/cosmos/group/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/group/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..35c3e2a9 --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/tx.rpc.msg.d.ts @@ -0,0 +1,51 @@ +import { Rpc } from "../../../helpers"; +import { MsgCreateGroup, MsgCreateGroupResponse, MsgUpdateGroupMembers, MsgUpdateGroupMembersResponse, MsgUpdateGroupAdmin, MsgUpdateGroupAdminResponse, MsgUpdateGroupMetadata, MsgUpdateGroupMetadataResponse, MsgCreateGroupPolicy, MsgCreateGroupPolicyResponse, MsgCreateGroupWithPolicy, MsgCreateGroupWithPolicyResponse, MsgUpdateGroupPolicyAdmin, MsgUpdateGroupPolicyAdminResponse, MsgUpdateGroupPolicyDecisionPolicy, MsgUpdateGroupPolicyDecisionPolicyResponse, MsgUpdateGroupPolicyMetadata, MsgUpdateGroupPolicyMetadataResponse, MsgSubmitProposal, MsgSubmitProposalResponse, MsgWithdrawProposal, MsgWithdrawProposalResponse, MsgVote, MsgVoteResponse, MsgExec, MsgExecResponse, MsgLeaveGroup, MsgLeaveGroupResponse } from "./tx"; +/** Msg is the cosmos.group.v1 Msg service. */ +export interface Msg { + /** CreateGroup creates a new group with an admin account address, a list of members and some optional metadata. */ + createGroup(request: MsgCreateGroup): Promise; + /** UpdateGroupMembers updates the group members with given group id and admin address. */ + updateGroupMembers(request: MsgUpdateGroupMembers): Promise; + /** UpdateGroupAdmin updates the group admin with given group id and previous admin address. */ + updateGroupAdmin(request: MsgUpdateGroupAdmin): Promise; + /** UpdateGroupMetadata updates the group metadata with given group id and admin address. */ + updateGroupMetadata(request: MsgUpdateGroupMetadata): Promise; + /** CreateGroupPolicy creates a new group policy using given DecisionPolicy. */ + createGroupPolicy(request: MsgCreateGroupPolicy): Promise; + /** CreateGroupWithPolicy creates a new group with policy. */ + createGroupWithPolicy(request: MsgCreateGroupWithPolicy): Promise; + /** UpdateGroupPolicyAdmin updates a group policy admin. */ + updateGroupPolicyAdmin(request: MsgUpdateGroupPolicyAdmin): Promise; + /** UpdateGroupPolicyDecisionPolicy allows a group policy's decision policy to be updated. */ + updateGroupPolicyDecisionPolicy(request: MsgUpdateGroupPolicyDecisionPolicy): Promise; + /** UpdateGroupPolicyMetadata updates a group policy metadata. */ + updateGroupPolicyMetadata(request: MsgUpdateGroupPolicyMetadata): Promise; + /** SubmitProposal submits a new proposal. */ + submitProposal(request: MsgSubmitProposal): Promise; + /** WithdrawProposal aborts a proposal. */ + withdrawProposal(request: MsgWithdrawProposal): Promise; + /** Vote allows a voter to vote on a proposal. */ + vote(request: MsgVote): Promise; + /** Exec executes a proposal. */ + exec(request: MsgExec): Promise; + /** LeaveGroup allows a group member to leave the group. 
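+ *
+ * A hypothetical sketch of preparing this message for broadcast (the member address is a
+ * placeholder; signing and broadcasting with a signing client are outside this file):
+ *
+ *   import Long from "long";
+ *
+ *   const msg: MsgLeaveGroup = { address: "cosmos1member...", groupId: Long.fromNumber(1) };
+ *   const anyMsg = { typeUrl: "/cosmos.group.v1.MsgLeaveGroup", value: MsgLeaveGroup.encode(msg).finish() };
+ *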
*/ + leaveGroup(request: MsgLeaveGroup): Promise<MsgLeaveGroupResponse>; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + createGroup(request: MsgCreateGroup): Promise<MsgCreateGroupResponse>; + updateGroupMembers(request: MsgUpdateGroupMembers): Promise<MsgUpdateGroupMembersResponse>; + updateGroupAdmin(request: MsgUpdateGroupAdmin): Promise<MsgUpdateGroupAdminResponse>; + updateGroupMetadata(request: MsgUpdateGroupMetadata): Promise<MsgUpdateGroupMetadataResponse>; + createGroupPolicy(request: MsgCreateGroupPolicy): Promise<MsgCreateGroupPolicyResponse>; + createGroupWithPolicy(request: MsgCreateGroupWithPolicy): Promise<MsgCreateGroupWithPolicyResponse>; + updateGroupPolicyAdmin(request: MsgUpdateGroupPolicyAdmin): Promise<MsgUpdateGroupPolicyAdminResponse>; + updateGroupPolicyDecisionPolicy(request: MsgUpdateGroupPolicyDecisionPolicy): Promise<MsgUpdateGroupPolicyDecisionPolicyResponse>; + updateGroupPolicyMetadata(request: MsgUpdateGroupPolicyMetadata): Promise<MsgUpdateGroupPolicyMetadataResponse>; + submitProposal(request: MsgSubmitProposal): Promise<MsgSubmitProposalResponse>; + withdrawProposal(request: MsgWithdrawProposal): Promise<MsgWithdrawProposalResponse>; + vote(request: MsgVote): Promise<MsgVoteResponse>; + exec(request: MsgExec): Promise<MsgExecResponse>; + leaveGroup(request: MsgLeaveGroup): Promise<MsgLeaveGroupResponse>; +} diff --git a/packages/codegen/dist/cosmos/group/v1/types.d.ts b/packages/codegen/dist/cosmos/group/v1/types.d.ts new file mode 100644 index 00000000..236c0278 --- /dev/null +++ b/packages/codegen/dist/cosmos/group/v1/types.d.ts @@ -0,0 +1,396 @@ +/// <reference types="long" /> +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given proposal. */ +export declare enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. */ + VOTE_OPTION_ABSTAIN = 2, + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. */ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1 +} +export declare const VoteOptionSDKType: typeof VoteOption; +export declare function voteOptionFromJSON(object: any): VoteOption; +export declare function voteOptionToJSON(object: VoteOption): string; +/** ProposalStatus defines proposal statuses. */ +export declare enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - An empty value is invalid and not allowed. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + /** PROPOSAL_STATUS_SUBMITTED - Initial status of a proposal when persisted. */ + PROPOSAL_STATUS_SUBMITTED = 1, + /** PROPOSAL_STATUS_CLOSED - Final status of a proposal when the final tally was executed. */ + PROPOSAL_STATUS_CLOSED = 2, + /** PROPOSAL_STATUS_ABORTED - Final status of a proposal when the group was modified before the final tally. */ + PROPOSAL_STATUS_ABORTED = 3, + /** + * PROPOSAL_STATUS_WITHDRAWN - A proposal can be deleted before the voting start time by the owner. When this happens the final status + * is Withdrawn. + */ + PROPOSAL_STATUS_WITHDRAWN = 4, + UNRECOGNIZED = -1 +} +export declare const ProposalStatusSDKType: typeof ProposalStatus; +export declare function proposalStatusFromJSON(object: any): ProposalStatus; +export declare function proposalStatusToJSON(object: ProposalStatus): string; +/** ProposalResult defines types of proposal results. 
*/ +export declare enum ProposalResult { + /** PROPOSAL_RESULT_UNSPECIFIED - An empty value is invalid and not allowed */ + PROPOSAL_RESULT_UNSPECIFIED = 0, + /** PROPOSAL_RESULT_UNFINALIZED - Until a final tally has happened the status is unfinalized */ + PROPOSAL_RESULT_UNFINALIZED = 1, + /** PROPOSAL_RESULT_ACCEPTED - Final result of the tally */ + PROPOSAL_RESULT_ACCEPTED = 2, + /** PROPOSAL_RESULT_REJECTED - Final result of the tally */ + PROPOSAL_RESULT_REJECTED = 3, + UNRECOGNIZED = -1 +} +export declare const ProposalResultSDKType: typeof ProposalResult; +export declare function proposalResultFromJSON(object: any): ProposalResult; +export declare function proposalResultToJSON(object: ProposalResult): string; +/** ProposalExecutorResult defines types of proposal executor results. */ +export declare enum ProposalExecutorResult { + /** PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED - An empty value is not allowed. */ + PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED = 0, + /** PROPOSAL_EXECUTOR_RESULT_NOT_RUN - We have not yet run the executor. */ + PROPOSAL_EXECUTOR_RESULT_NOT_RUN = 1, + /** PROPOSAL_EXECUTOR_RESULT_SUCCESS - The executor was successful and proposed action updated state. */ + PROPOSAL_EXECUTOR_RESULT_SUCCESS = 2, + /** PROPOSAL_EXECUTOR_RESULT_FAILURE - The executor returned an error and proposed action didn't update state. */ + PROPOSAL_EXECUTOR_RESULT_FAILURE = 3, + UNRECOGNIZED = -1 +} +export declare const ProposalExecutorResultSDKType: typeof ProposalExecutorResult; +export declare function proposalExecutorResultFromJSON(object: any): ProposalExecutorResult; +export declare function proposalExecutorResultToJSON(object: ProposalExecutorResult): string; +/** + * Member represents a group member with an account address, + * non-zero weight and metadata. + */ +export interface Member { + /** address is the member's account address. */ + address: string; + /** weight is the member's voting weight that should be greater than 0. */ + weight: string; + /** metadata is any arbitrary metadata to attached to the member. */ + metadata: string; + /** added_at is a timestamp specifying when a member was added. */ + addedAt?: Date; +} +/** + * Member represents a group member with an account address, + * non-zero weight and metadata. + */ +export interface MemberSDKType { + address: string; + weight: string; + metadata: string; + added_at?: Date; +} +/** Members defines a repeated slice of Member objects. */ +export interface Members { + /** members is the list of members. */ + members: Member[]; +} +/** Members defines a repeated slice of Member objects. */ +export interface MembersSDKType { + members: MemberSDKType[]; +} +/** ThresholdDecisionPolicy implements the DecisionPolicy interface */ +export interface ThresholdDecisionPolicy { + /** threshold is the minimum weighted sum of yes votes that must be met or exceeded for a proposal to succeed. */ + threshold: string; + /** windows defines the different windows for voting and execution. */ + windows?: DecisionPolicyWindows; +} +/** ThresholdDecisionPolicy implements the DecisionPolicy interface */ +export interface ThresholdDecisionPolicySDKType { + threshold: string; + windows?: DecisionPolicyWindowsSDKType; +} +/** PercentageDecisionPolicy implements the DecisionPolicy interface */ +export interface PercentageDecisionPolicy { + /** percentage is the minimum percentage the weighted sum of yes votes must meet for a proposal to succeed. */ + percentage: string; + /** windows defines the different windows for voting and execution. 
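+ *
+ * A hypothetical example, assuming the generated Duration shape is { seconds, nanos }
+ * and using Long from the "long" package:
+ *
+ *   const windows: DecisionPolicyWindows = {
+ *     votingPeriod: { seconds: Long.fromNumber(86400), nanos: 0 },
+ *     minExecutionPeriod: { seconds: Long.fromNumber(0), nanos: 0 }
+ *   };
+ *   const policy: PercentageDecisionPolicy = { percentage: "0.5", windows };
+ *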
*/ + windows?: DecisionPolicyWindows; +} +/** PercentageDecisionPolicy implements the DecisionPolicy interface */ +export interface PercentageDecisionPolicySDKType { + percentage: string; + windows?: DecisionPolicyWindowsSDKType; +} +/** DecisionPolicyWindows defines the different windows for voting and execution. */ +export interface DecisionPolicyWindows { + /** + * voting_period is the duration from submission of a proposal to the end of voting period + * Within this times votes can be submitted with MsgVote. + */ + votingPeriod?: Duration; + /** + * min_execution_period is the minimum duration after the proposal submission + * where members can start sending MsgExec. This means that the window for + * sending a MsgExec transaction is: + * `[ submission + min_execution_period ; submission + voting_period + max_execution_period]` + * where max_execution_period is a app-specific config, defined in the keeper. + * If not set, min_execution_period will default to 0. + * + * Please make sure to set a `min_execution_period` that is smaller than + * `voting_period + max_execution_period`, or else the above execution window + * is empty, meaning that all proposals created with this decision policy + * won't be able to be executed. + */ + minExecutionPeriod?: Duration; +} +/** DecisionPolicyWindows defines the different windows for voting and execution. */ +export interface DecisionPolicyWindowsSDKType { + voting_period?: DurationSDKType; + min_execution_period?: DurationSDKType; +} +/** GroupInfo represents the high-level on-chain information for a group. */ +export interface GroupInfo { + /** id is the unique ID of the group. */ + id: Long; + /** admin is the account address of the group's admin. */ + admin: string; + /** metadata is any arbitrary metadata to attached to the group. */ + metadata: string; + /** + * version is used to track changes to a group's membership structure that + * would break existing proposals. Whenever any members weight is changed, + * or any member is added or removed this version is incremented and will + * cause proposals based on older versions of this group to fail + */ + version: Long; + /** total_weight is the sum of the group members' weights. */ + totalWeight: string; + /** created_at is a timestamp specifying when a group was created. */ + createdAt?: Date; +} +/** GroupInfo represents the high-level on-chain information for a group. */ +export interface GroupInfoSDKType { + id: Long; + admin: string; + metadata: string; + version: Long; + total_weight: string; + created_at?: Date; +} +/** GroupMember represents the relationship between a group and a member. */ +export interface GroupMember { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** member is the member data. */ + member?: Member; +} +/** GroupMember represents the relationship between a group and a member. */ +export interface GroupMemberSDKType { + group_id: Long; + member?: MemberSDKType; +} +/** GroupPolicyInfo represents the high-level on-chain information for a group policy. */ +export interface GroupPolicyInfo { + /** address is the account address of group policy. */ + address: string; + /** group_id is the unique ID of the group. */ + groupId: Long; + /** admin is the account address of the group admin. */ + admin: string; + /** metadata is any arbitrary metadata to attached to the group policy. */ + metadata: string; + /** + * version is used to track changes to a group's GroupPolicyInfo structure that + * would create a different result on a running proposal. 
+ */ + version: Long; + /** decision_policy specifies the group policy's decision policy. */ + decisionPolicy?: Any; + /** created_at is a timestamp specifying when a group policy was created. */ + createdAt?: Date; +} +/** GroupPolicyInfo represents the high-level on-chain information for a group policy. */ +export interface GroupPolicyInfoSDKType { + address: string; + group_id: Long; + admin: string; + metadata: string; + version: Long; + decision_policy?: AnySDKType; + created_at?: Date; +} +/** + * Proposal defines a group proposal. Any member of a group can submit a proposal + * for a group policy to decide upon. + * A proposal consists of a set of `sdk.Msg`s that will be executed if the proposal + * passes as well as some optional metadata associated with the proposal. + */ +export interface Proposal { + /** id is the unique id of the proposal. */ + id: Long; + /** address is the account address of group policy. */ + address: string; + /** metadata is any arbitrary metadata to attached to the proposal. */ + metadata: string; + /** proposers are the account addresses of the proposers. */ + proposers: string[]; + /** submit_time is a timestamp specifying when a proposal was submitted. */ + submitTime?: Date; + /** + * group_version tracks the version of the group that this proposal corresponds to. + * When group membership is changed, existing proposals from previous group versions will become invalid. + */ + groupVersion: Long; + /** + * group_policy_version tracks the version of the group policy that this proposal corresponds to. + * When a decision policy is changed, existing proposals from previous policy versions will become invalid. + */ + groupPolicyVersion: Long; + /** status represents the high level position in the life cycle of the proposal. Initial value is Submitted. */ + status: ProposalStatus; + /** + * result is the final result based on the votes and election rule. Initial value is unfinalized. + * The result is persisted so that clients can always rely on this state and not have to replicate the logic. + */ + result: ProposalResult; + /** + * final_tally_result contains the sums of all weighted votes for this + * proposal for each vote option, after tallying. When querying a proposal + * via gRPC, this field is not populated until the proposal's voting period + * has ended. + */ + finalTallyResult?: TallyResult; + /** + * voting_period_end is the timestamp before which voting must be done. + * Unless a successfull MsgExec is called before (to execute a proposal whose + * tally is successful before the voting period ends), tallying will be done + * at this point, and the `final_tally_result`, as well + * as `status` and `result` fields will be accordingly updated. + */ + votingPeriodEnd?: Date; + /** executor_result is the final result based on the votes and election rule. Initial value is NotRun. */ + executorResult: ProposalExecutorResult; + /** messages is a list of Msgs that will be executed if the proposal passes. */ + messages: Any[]; +} +/** + * Proposal defines a group proposal. Any member of a group can submit a proposal + * for a group policy to decide upon. + * A proposal consists of a set of `sdk.Msg`s that will be executed if the proposal + * passes as well as some optional metadata associated with the proposal. 
+ */ +export interface ProposalSDKType { + id: Long; + address: string; + metadata: string; + proposers: string[]; + submit_time?: Date; + group_version: Long; + group_policy_version: Long; + status: ProposalStatus; + result: ProposalResult; + final_tally_result?: TallyResultSDKType; + voting_period_end?: Date; + executor_result: ProposalExecutorResult; + messages: AnySDKType[]; +} +/** TallyResult represents the sum of weighted votes for each vote option. */ +export interface TallyResult { + /** yes_count is the weighted sum of yes votes. */ + yesCount: string; + /** abstain_count is the weighted sum of abstainers. */ + abstainCount: string; + /** no is the weighted sum of no votes. */ + noCount: string; + /** no_with_veto_count is the weighted sum of veto. */ + noWithVetoCount: string; +} +/** TallyResult represents the sum of weighted votes for each vote option. */ +export interface TallyResultSDKType { + yes_count: string; + abstain_count: string; + no_count: string; + no_with_veto_count: string; +} +/** Vote represents a vote for a proposal. */ +export interface Vote { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** voter is the account address of the voter. */ + voter: string; + /** option is the voter's choice on the proposal. */ + option: VoteOption; + /** metadata is any arbitrary metadata to attached to the vote. */ + metadata: string; + /** submit_time is the timestamp when the vote was submitted. */ + submitTime?: Date; +} +/** Vote represents a vote for a proposal. */ +export interface VoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; + submit_time?: Date; +} +export declare const Member: { + encode(message: Member, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Member; + fromPartial(object: DeepPartial): Member; +}; +export declare const Members: { + encode(message: Members, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Members; + fromPartial(object: DeepPartial): Members; +}; +export declare const ThresholdDecisionPolicy: { + encode(message: ThresholdDecisionPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ThresholdDecisionPolicy; + fromPartial(object: DeepPartial): ThresholdDecisionPolicy; +}; +export declare const PercentageDecisionPolicy: { + encode(message: PercentageDecisionPolicy, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PercentageDecisionPolicy; + fromPartial(object: DeepPartial): PercentageDecisionPolicy; +}; +export declare const DecisionPolicyWindows: { + encode(message: DecisionPolicyWindows, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DecisionPolicyWindows; + fromPartial(object: DeepPartial): DecisionPolicyWindows; +}; +export declare const GroupInfo: { + encode(message: GroupInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GroupInfo; + fromPartial(object: DeepPartial): GroupInfo; +}; +export declare const GroupMember: { + encode(message: GroupMember, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GroupMember; + fromPartial(object: DeepPartial): GroupMember; +}; +export declare const GroupPolicyInfo: { + encode(message: GroupPolicyInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GroupPolicyInfo; + 
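// A sketch of the encode/decode round trip exposed by the codec objects declared below
// (the Vote codec follows a few lines further down). Long is the `long` library; the
// VoteOption member name and the import path are assumptions, and the voter address is
// a placeholder.
import Long from "long";
import { Vote, VoteOption } from "./cosmos/group/v1/types"; // path assumed

const vote = Vote.fromPartial({
  proposalId: Long.fromNumber(1),
  voter: "cosmos1voteraddress",       // placeholder
  option: VoteOption.VOTE_OPTION_YES, // member name assumed; the enum is declared earlier in this file
  metadata: ""
});
const voteBytes = Vote.encode(vote).finish(); // protobufjs Writer -> Uint8Array
const roundTripped = Vote.decode(voteBytes);  // decodes back to an equivalent Vote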
fromPartial(object: DeepPartial): GroupPolicyInfo; +}; +export declare const Proposal: { + encode(message: Proposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal; + fromPartial(object: DeepPartial): Proposal; +}; +export declare const TallyResult: { + encode(message: TallyResult, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult; + fromPartial(object: DeepPartial): TallyResult; +}; +export declare const Vote: { + encode(message: Vote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Vote; + fromPartial(object: DeepPartial): Vote; +}; diff --git a/packages/codegen/dist/cosmos/lcd.d.ts b/packages/codegen/dist/cosmos/lcd.d.ts new file mode 100644 index 00000000..9be4d8ca --- /dev/null +++ b/packages/codegen/dist/cosmos/lcd.d.ts @@ -0,0 +1,57 @@ +export declare const createLCDClient: ({ restEndpoint }: { + restEndpoint: string; +}) => Promise<{ + cosmos: { + auth: { + v1beta1: import("./auth/v1beta1/query.lcd").LCDQueryClient; + }; + authz: { + v1beta1: import("./authz/v1beta1/query.lcd").LCDQueryClient; + }; + bank: { + v1beta1: import("./bank/v1beta1/query.lcd").LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: import("./base/tendermint/v1beta1/query.lcd").LCDQueryClient; + }; + }; + distribution: { + v1beta1: import("./distribution/v1beta1/query.lcd").LCDQueryClient; + }; + evidence: { + v1beta1: import("./evidence/v1beta1/query.lcd").LCDQueryClient; + }; + feegrant: { + v1beta1: import("./feegrant/v1beta1/query.lcd").LCDQueryClient; + }; + gov: { + v1: import("./gov/v1/query.lcd").LCDQueryClient; + v1beta1: import("./gov/v1beta1/query.lcd").LCDQueryClient; + }; + group: { + v1: import("./group/v1/query.lcd").LCDQueryClient; + }; + mint: { + v1beta1: import("./mint/v1beta1/query.lcd").LCDQueryClient; + }; + nft: { + v1beta1: import("./nft/v1beta1/query.lcd").LCDQueryClient; + }; + params: { + v1beta1: import("./params/v1beta1/query.lcd").LCDQueryClient; + }; + slashing: { + v1beta1: import("./slashing/v1beta1/query.lcd").LCDQueryClient; + }; + staking: { + v1beta1: import("./staking/v1beta1/query.lcd").LCDQueryClient; + }; + tx: { + v1beta1: import("./tx/v1beta1/service.lcd").LCDQueryClient; + }; + upgrade: { + v1beta1: import("./upgrade/v1beta1/query.lcd").LCDQueryClient; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmos/mint/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/mint/v1beta1/genesis.d.ts new file mode 100644 index 00000000..155c109a --- /dev/null +++ b/packages/codegen/dist/cosmos/mint/v1beta1/genesis.d.ts @@ -0,0 +1,20 @@ +import { Minter, MinterSDKType, Params, ParamsSDKType } from "./mint"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the mint module's genesis state. */ +export interface GenesisState { + /** minter is a space for holding current inflation information. */ + minter?: Minter; + /** params defines all the paramaters of the module. */ + params?: Params; +} +/** GenesisState defines the mint module's genesis state. 
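// A minimal usage sketch for the createLCDClient factory declared in dist/cosmos/lcd.d.ts
// above. The REST endpoint is a placeholder; the awaited result exposes one LCDQueryClient
// per module, mirroring the proto package path.
import { createLCDClient } from "./cosmos/lcd";

(async () => {
  const client = await createLCDClient({ restEndpoint: "http://localhost:1317" }); // placeholder endpoint
  const mintParams = await client.cosmos.mint.v1beta1.params();
  const inflation = await client.cosmos.mint.v1beta1.inflation();
  console.log(mintParams, inflation);
})();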
*/ +export interface GenesisStateSDKType { + minter?: MinterSDKType; + params?: ParamsSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/cosmos/mint/v1beta1/mint.d.ts b/packages/codegen/dist/cosmos/mint/v1beta1/mint.d.ts new file mode 100644 index 00000000..0dc5a3bd --- /dev/null +++ b/packages/codegen/dist/cosmos/mint/v1beta1/mint.d.ts @@ -0,0 +1,49 @@ +/// +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Minter represents the minting state. */ +export interface Minter { + /** current annual inflation rate */ + inflation: string; + /** current annual expected provisions */ + annualProvisions: string; +} +/** Minter represents the minting state. */ +export interface MinterSDKType { + inflation: string; + annual_provisions: string; +} +/** Params holds parameters for the mint module. */ +export interface Params { + /** type of coin to mint */ + mintDenom: string; + /** maximum annual change in inflation rate */ + inflationRateChange: string; + /** maximum inflation rate */ + inflationMax: string; + /** minimum inflation rate */ + inflationMin: string; + /** goal of percent bonded atoms */ + goalBonded: string; + /** expected blocks per year */ + blocksPerYear: Long; +} +/** Params holds parameters for the mint module. */ +export interface ParamsSDKType { + mint_denom: string; + inflation_rate_change: string; + inflation_max: string; + inflation_min: string; + goal_bonded: string; + blocks_per_year: Long; +} +export declare const Minter: { + encode(message: Minter, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Minter; + fromPartial(object: DeepPartial): Minter; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; diff --git a/packages/codegen/dist/cosmos/mint/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/mint/v1beta1/query.d.ts new file mode 100644 index 00000000..a9e3b1c9 --- /dev/null +++ b/packages/codegen/dist/cosmos/mint/v1beta1/query.d.ts @@ -0,0 +1,96 @@ +import { Params, ParamsSDKType } from "./mint"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryInflationRequest is the request type for the Query/Inflation RPC method. */ +export interface QueryInflationRequest { +} +/** QueryInflationRequest is the request type for the Query/Inflation RPC method. */ +export interface QueryInflationRequestSDKType { +} +/** + * QueryInflationResponse is the response type for the Query/Inflation RPC + * method. 
+ */ +export interface QueryInflationResponse { + /** inflation is the current minting inflation value. */ + inflation: Uint8Array; +} +/** + * QueryInflationResponse is the response type for the Query/Inflation RPC + * method. + */ +export interface QueryInflationResponseSDKType { + inflation: Uint8Array; +} +/** + * QueryAnnualProvisionsRequest is the request type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsRequest { +} +/** + * QueryAnnualProvisionsRequest is the request type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsRequestSDKType { +} +/** + * QueryAnnualProvisionsResponse is the response type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsResponse { + /** annual_provisions is the current minting annual provisions value. */ + annualProvisions: Uint8Array; +} +/** + * QueryAnnualProvisionsResponse is the response type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsResponseSDKType { + annual_provisions: Uint8Array; +} +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QueryInflationRequest: { + encode(_: QueryInflationRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationRequest; + fromPartial(_: DeepPartial): QueryInflationRequest; +}; +export declare const QueryInflationResponse: { + encode(message: QueryInflationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationResponse; + fromPartial(object: DeepPartial): QueryInflationResponse; +}; +export declare const QueryAnnualProvisionsRequest: { + encode(_: QueryAnnualProvisionsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsRequest; + fromPartial(_: DeepPartial): QueryAnnualProvisionsRequest; +}; +export declare const QueryAnnualProvisionsResponse: { + encode(message: QueryAnnualProvisionsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsResponse; + fromPartial(object: DeepPartial): QueryAnnualProvisionsResponse; +}; diff --git a/packages/codegen/dist/cosmos/mint/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/mint/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..bab04928 --- /dev/null +++ b/packages/codegen/dist/cosmos/mint/v1beta1/query.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QueryInflationRequest, QueryInflationResponseSDKType, QueryAnnualProvisionsRequest, QueryAnnualProvisionsResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + params(_params?: QueryParamsRequest): Promise; + inflation(_params?: QueryInflationRequest): Promise; + annualProvisions(_params?: QueryAnnualProvisionsRequest): Promise; +} diff 
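// A sketch of using the mint LCDQueryClient declared just above. The return types appear
// stripped to a bare `Promise` in this diff; judging by the imported Query*ResponseSDKType
// symbols they presumably resolve to the SDKType response shapes. Constructing LCDClient with
// { restEndpoint } is an assumption about @osmonauts/lcd.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient as MintLCDQueryClient } from "./cosmos/mint/v1beta1/query.lcd";

const mint = new MintLCDQueryClient({
  requestClient: new LCDClient({ restEndpoint: "http://localhost:1317" }) // placeholder endpoint
});

(async () => {
  const params = await mint.params();               // presumably QueryParamsResponseSDKType
  const inflation = await mint.inflation();         // presumably QueryInflationResponseSDKType
  const provisions = await mint.annualProvisions(); // presumably QueryAnnualProvisionsResponseSDKType
  console.log(params, inflation, provisions);
})();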
--git a/packages/codegen/dist/cosmos/mint/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/mint/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..83fc7060 --- /dev/null +++ b/packages/codegen/dist/cosmos/mint/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QueryInflationRequest, QueryInflationResponse, QueryAnnualProvisionsRequest, QueryAnnualProvisionsResponse } from "./query"; +/** Query provides defines the gRPC querier service. */ +export interface Query { + /** Params returns the total set of minting parameters. */ + params(request?: QueryParamsRequest): Promise; + /** Inflation returns the current minting inflation value. */ + inflation(request?: QueryInflationRequest): Promise; + /** AnnualProvisions current minting annual provisions value. */ + annualProvisions(request?: QueryAnnualProvisionsRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + params(request?: QueryParamsRequest): Promise; + inflation(request?: QueryInflationRequest): Promise; + annualProvisions(request?: QueryAnnualProvisionsRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + params(request?: QueryParamsRequest): Promise; + inflation(request?: QueryInflationRequest): Promise; + annualProvisions(request?: QueryAnnualProvisionsRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/msg/v1/msg.d.ts b/packages/codegen/dist/cosmos/msg/v1/msg.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/packages/codegen/dist/cosmos/msg/v1/msg.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/event.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/event.d.ts new file mode 100644 index 00000000..311a6b0b --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/event.d.ts @@ -0,0 +1,55 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** EventSend is emitted on Msg/Send */ +export interface EventSend { + classId: string; + id: string; + sender: string; + receiver: string; +} +/** EventSend is emitted on Msg/Send */ +export interface EventSendSDKType { + class_id: string; + id: string; + sender: string; + receiver: string; +} +/** EventMint is emitted on Mint */ +export interface EventMint { + classId: string; + id: string; + owner: string; +} +/** EventMint is emitted on Mint */ +export interface EventMintSDKType { + class_id: string; + id: string; + owner: string; +} +/** EventBurn is emitted on Burn */ +export interface EventBurn { + classId: string; + id: string; + owner: string; +} +/** EventBurn is emitted on Burn */ +export interface EventBurnSDKType { + class_id: string; + id: string; + owner: string; +} +export declare const EventSend: { + encode(message: EventSend, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventSend; + fromPartial(object: DeepPartial): EventSend; +}; +export declare const EventMint: { + encode(message: EventMint, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventMint; + fromPartial(object: DeepPartial): EventMint; +}; +export declare const EventBurn: { + encode(message: EventBurn, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventBurn; + fromPartial(object: DeepPartial): 
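// A sketch of wiring the mint RPC query extension declared above into a @cosmjs/stargate
// QueryClient. Tendermint34Client.connect and QueryClient.withExtensions are standard cosmjs
// calls; the RPC endpoint is a placeholder.
import { QueryClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { createRpcQueryExtension } from "./cosmos/mint/v1beta1/query.rpc.Query";

(async () => {
  const tmClient = await Tendermint34Client.connect("http://localhost:26657"); // placeholder endpoint
  const mintQuery = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
  // All three request arguments are optional, per the Query interface above.
  console.log(await mintQuery.params(), await mintQuery.inflation(), await mintQuery.annualProvisions());
})();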
EventBurn; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/genesis.d.ts new file mode 100644 index 00000000..cf650ded --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/genesis.d.ts @@ -0,0 +1,36 @@ +import { Class, ClassSDKType, NFT, NFTSDKType } from "./nft"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the nft module's genesis state. */ +export interface GenesisState { + /** class defines the class of the nft type. */ + classes: Class[]; + entries: Entry[]; +} +/** GenesisState defines the nft module's genesis state. */ +export interface GenesisStateSDKType { + classes: ClassSDKType[]; + entries: EntrySDKType[]; +} +/** Entry Defines all nft owned by a person */ +export interface Entry { + /** owner is the owner address of the following nft */ + owner: string; + /** nfts is a group of nfts of the same owner */ + nfts: NFT[]; +} +/** Entry Defines all nft owned by a person */ +export interface EntrySDKType { + owner: string; + nfts: NFTSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const Entry: { + encode(message: Entry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Entry; + fromPartial(object: DeepPartial): Entry; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/nft.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/nft.d.ts new file mode 100644 index 00000000..95f836c0 --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/nft.d.ts @@ -0,0 +1,61 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** Class defines the class of the nft type. */ +export interface Class { + /** id defines the unique identifier of the NFT classification, similar to the contract address of ERC721 */ + id: string; + /** name defines the human-readable name of the NFT classification. Optional */ + name: string; + /** symbol is an abbreviated name for nft classification. Optional */ + symbol: string; + /** description is a brief description of nft classification. Optional */ + description: string; + /** uri for the class metadata stored off chain. It can define schema for Class and NFT `Data` attributes. Optional */ + uri: string; + /** uri_hash is a hash of the document pointed by uri. Optional */ + uriHash: string; + /** data is the app specific metadata of the NFT class. Optional */ + data?: Any; +} +/** Class defines the class of the nft type. */ +export interface ClassSDKType { + id: string; + name: string; + symbol: string; + description: string; + uri: string; + uri_hash: string; + data?: AnySDKType; +} +/** NFT defines the NFT. */ +export interface NFT { + /** class_id associated with the NFT, similar to the contract address of ERC721 */ + classId: string; + /** id is a unique identifier of the NFT */ + id: string; + /** uri for the NFT metadata stored off chain */ + uri: string; + /** uri_hash is a hash of the document pointed by uri */ + uriHash: string; + /** data is an app specific data of the NFT. Optional */ + data?: Any; +} +/** NFT defines the NFT. 
*/ +export interface NFTSDKType { + class_id: string; + id: string; + uri: string; + uri_hash: string; + data?: AnySDKType; +} +export declare const Class: { + encode(message: Class, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Class; + fromPartial(object: DeepPartial): Class; +}; +export declare const NFT: { + encode(message: NFT, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NFT; + fromPartial(object: DeepPartial): NFT; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/query.d.ts new file mode 100644 index 00000000..f1a0e377 --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/query.d.ts @@ -0,0 +1,202 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { NFT, NFTSDKType, Class, ClassSDKType } from "./nft"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryBalanceRequest is the request type for the Query/Balance RPC method */ +export interface QueryBalanceRequest { + classId: string; + owner: string; +} +/** QueryBalanceRequest is the request type for the Query/Balance RPC method */ +export interface QueryBalanceRequestSDKType { + class_id: string; + owner: string; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method */ +export interface QueryBalanceResponse { + amount: Long; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method */ +export interface QueryBalanceResponseSDKType { + amount: Long; +} +/** QueryOwnerRequest is the request type for the Query/Owner RPC method */ +export interface QueryOwnerRequest { + classId: string; + id: string; +} +/** QueryOwnerRequest is the request type for the Query/Owner RPC method */ +export interface QueryOwnerRequestSDKType { + class_id: string; + id: string; +} +/** QueryOwnerResponse is the response type for the Query/Owner RPC method */ +export interface QueryOwnerResponse { + owner: string; +} +/** QueryOwnerResponse is the response type for the Query/Owner RPC method */ +export interface QueryOwnerResponseSDKType { + owner: string; +} +/** QuerySupplyRequest is the request type for the Query/Supply RPC method */ +export interface QuerySupplyRequest { + classId: string; +} +/** QuerySupplyRequest is the request type for the Query/Supply RPC method */ +export interface QuerySupplyRequestSDKType { + class_id: string; +} +/** QuerySupplyResponse is the response type for the Query/Supply RPC method */ +export interface QuerySupplyResponse { + amount: Long; +} +/** QuerySupplyResponse is the response type for the Query/Supply RPC method */ +export interface QuerySupplyResponseSDKType { + amount: Long; +} +/** QueryNFTstRequest is the request type for the Query/NFTs RPC method */ +export interface QueryNFTsRequest { + classId: string; + owner: string; + pagination?: PageRequest; +} +/** QueryNFTstRequest is the request type for the Query/NFTs RPC method */ +export interface QueryNFTsRequestSDKType { + class_id: string; + owner: string; + pagination?: PageRequestSDKType; +} +/** QueryNFTsResponse is the response type for the Query/NFTs RPC methods */ +export interface QueryNFTsResponse { + nfts: NFT[]; + pagination?: PageResponse; +} +/** QueryNFTsResponse is the response type for the Query/NFTs RPC methods */ +export interface QueryNFTsResponseSDKType { + nfts: NFTSDKType[]; + 
pagination?: PageResponseSDKType; +} +/** QueryNFTRequest is the request type for the Query/NFT RPC method */ +export interface QueryNFTRequest { + classId: string; + id: string; +} +/** QueryNFTRequest is the request type for the Query/NFT RPC method */ +export interface QueryNFTRequestSDKType { + class_id: string; + id: string; +} +/** QueryNFTResponse is the response type for the Query/NFT RPC method */ +export interface QueryNFTResponse { + nft?: NFT; +} +/** QueryNFTResponse is the response type for the Query/NFT RPC method */ +export interface QueryNFTResponseSDKType { + nft?: NFTSDKType; +} +/** QueryClassRequest is the request type for the Query/Class RPC method */ +export interface QueryClassRequest { + classId: string; +} +/** QueryClassRequest is the request type for the Query/Class RPC method */ +export interface QueryClassRequestSDKType { + class_id: string; +} +/** QueryClassResponse is the response type for the Query/Class RPC method */ +export interface QueryClassResponse { + class?: Class; +} +/** QueryClassResponse is the response type for the Query/Class RPC method */ +export interface QueryClassResponseSDKType { + class?: ClassSDKType; +} +/** QueryClassesRequest is the request type for the Query/Classes RPC method */ +export interface QueryClassesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryClassesRequest is the request type for the Query/Classes RPC method */ +export interface QueryClassesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryClassesResponse is the response type for the Query/Classes RPC method */ +export interface QueryClassesResponse { + classes: Class[]; + pagination?: PageResponse; +} +/** QueryClassesResponse is the response type for the Query/Classes RPC method */ +export interface QueryClassesResponseSDKType { + classes: ClassSDKType[]; + pagination?: PageResponseSDKType; +} +export declare const QueryBalanceRequest: { + encode(message: QueryBalanceRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceRequest; + fromPartial(object: DeepPartial): QueryBalanceRequest; +}; +export declare const QueryBalanceResponse: { + encode(message: QueryBalanceResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceResponse; + fromPartial(object: DeepPartial): QueryBalanceResponse; +}; +export declare const QueryOwnerRequest: { + encode(message: QueryOwnerRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryOwnerRequest; + fromPartial(object: DeepPartial): QueryOwnerRequest; +}; +export declare const QueryOwnerResponse: { + encode(message: QueryOwnerResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryOwnerResponse; + fromPartial(object: DeepPartial): QueryOwnerResponse; +}; +export declare const QuerySupplyRequest: { + encode(message: QuerySupplyRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyRequest; + fromPartial(object: DeepPartial): QuerySupplyRequest; +}; +export declare const QuerySupplyResponse: { + encode(message: QuerySupplyResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyResponse; + fromPartial(object: DeepPartial): QuerySupplyResponse; +}; +export declare const QueryNFTsRequest: { + encode(message: 
QueryNFTsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTsRequest; + fromPartial(object: DeepPartial): QueryNFTsRequest; +}; +export declare const QueryNFTsResponse: { + encode(message: QueryNFTsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTsResponse; + fromPartial(object: DeepPartial): QueryNFTsResponse; +}; +export declare const QueryNFTRequest: { + encode(message: QueryNFTRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTRequest; + fromPartial(object: DeepPartial): QueryNFTRequest; +}; +export declare const QueryNFTResponse: { + encode(message: QueryNFTResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTResponse; + fromPartial(object: DeepPartial): QueryNFTResponse; +}; +export declare const QueryClassRequest: { + encode(message: QueryClassRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassRequest; + fromPartial(object: DeepPartial): QueryClassRequest; +}; +export declare const QueryClassResponse: { + encode(message: QueryClassResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassResponse; + fromPartial(object: DeepPartial): QueryClassResponse; +}; +export declare const QueryClassesRequest: { + encode(message: QueryClassesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassesRequest; + fromPartial(object: DeepPartial): QueryClassesRequest; +}; +export declare const QueryClassesResponse: { + encode(message: QueryClassesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassesResponse; + fromPartial(object: DeepPartial): QueryClassesResponse; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..d5be0983 --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/query.lcd.d.ts @@ -0,0 +1,15 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryBalanceRequest, QueryBalanceResponseSDKType, QueryOwnerRequest, QueryOwnerResponseSDKType, QuerySupplyRequest, QuerySupplyResponseSDKType, QueryNFTsRequest, QueryNFTsResponseSDKType, QueryNFTRequest, QueryNFTResponseSDKType, QueryClassRequest, QueryClassResponseSDKType, QueryClassesRequest, QueryClassesResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + balance(params: QueryBalanceRequest): Promise; + owner(params: QueryOwnerRequest): Promise; + supply(params: QuerySupplyRequest): Promise; + nFTs(params: QueryNFTsRequest): Promise; + nFT(params: QueryNFTRequest): Promise; + class(params: QueryClassRequest): Promise; + classes(params?: QueryClassesRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..c8c650c0 --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,43 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryBalanceRequest, QueryBalanceResponse, QueryOwnerRequest, QueryOwnerResponse, QuerySupplyRequest, 
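// A sketch for the nft LCDQueryClient declared just above. Request messages use the camelCase
// interface fields (classId, owner), while the *SDKType responses presumably come back
// snake_cased. All identifiers below are placeholders; the LCDClient constructor shape is an
// assumption about @osmonauts/lcd.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient as NftLCDQueryClient } from "./cosmos/nft/v1beta1/query.lcd";

const nft = new NftLCDQueryClient({
  requestClient: new LCDClient({ restEndpoint: "http://localhost:1317" }) // placeholder endpoint
});

(async () => {
  const balance = await nft.balance({ classId: "exampleclass", owner: "cosmos1owneraddress" });
  const owner = await nft.owner({ classId: "exampleclass", id: "token1" });
  const classes = await nft.classes(); // the pagination request is optional
  console.log(balance, owner, classes);
})();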
QuerySupplyResponse, QueryNFTsRequest, QueryNFTsResponse, QueryNFTRequest, QueryNFTResponse, QueryClassRequest, QueryClassResponse, QueryClassesRequest, QueryClassesResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** Balance queries the number of NFTs of a given class owned by the owner, same as balanceOf in ERC721 */ + balance(request: QueryBalanceRequest): Promise; + /** Owner queries the owner of the NFT based on its class and id, same as ownerOf in ERC721 */ + owner(request: QueryOwnerRequest): Promise; + /** Supply queries the number of NFTs from the given class, same as totalSupply of ERC721. */ + supply(request: QuerySupplyRequest): Promise; + /** + * NFTs queries all NFTs of a given class or owner,choose at least one of the two, similar to tokenByIndex in + * ERC721Enumerable + */ + nFTs(request: QueryNFTsRequest): Promise; + /** NFT queries an NFT based on its class and id. */ + nFT(request: QueryNFTRequest): Promise; + /** Class queries an NFT class based on its id */ + class(request: QueryClassRequest): Promise; + /** Classes queries all NFT classes */ + classes(request?: QueryClassesRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + balance(request: QueryBalanceRequest): Promise; + owner(request: QueryOwnerRequest): Promise; + supply(request: QuerySupplyRequest): Promise; + nFTs(request: QueryNFTsRequest): Promise; + nFT(request: QueryNFTRequest): Promise; + class(request: QueryClassRequest): Promise; + classes(request?: QueryClassesRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + balance(request: QueryBalanceRequest): Promise; + owner(request: QueryOwnerRequest): Promise; + supply(request: QuerySupplyRequest): Promise; + nFTs(request: QueryNFTsRequest): Promise; + nFT(request: QueryNFTRequest): Promise; + class(request: QueryClassRequest): Promise; + classes(request?: QueryClassesRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/tx.d.ts new file mode 100644 index 00000000..ee238b75 --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/tx.d.ts @@ -0,0 +1,36 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgSend represents a message to send a nft from one account to another account. */ +export interface MsgSend { + /** class_id defines the unique identifier of the nft classification, similar to the contract address of ERC721 */ + classId: string; + /** id defines the unique identification of nft */ + id: string; + /** sender is the address of the owner of nft */ + sender: string; + /** receiver is the receiver address of nft */ + receiver: string; +} +/** MsgSend represents a message to send a nft from one account to another account. */ +export interface MsgSendSDKType { + class_id: string; + id: string; + sender: string; + receiver: string; +} +/** MsgSendResponse defines the Msg/Send response type. */ +export interface MsgSendResponse { +} +/** MsgSendResponse defines the Msg/Send response type. 
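// An alternative wiring sketch for the nft Query service declared above, using QueryClientImpl
// directly. It assumes the Rpc interface from ../helpers is shape-compatible with
// @cosmjs/stargate's ProtobufRpcClient (a request(service, method, data) method); the endpoint
// and ids are placeholders.
import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClientImpl as NftQueryClient } from "./cosmos/nft/v1beta1/query.rpc.Query";

(async () => {
  const tmClient = await Tendermint34Client.connect("http://localhost:26657"); // placeholder endpoint
  const rpc = createProtobufRpcClient(QueryClient.withExtensions(tmClient));
  const nftQuery = new NftQueryClient(rpc);
  const supply = await nftQuery.supply({ classId: "exampleclass" });
  console.log(supply.amount.toString()); // amount is a Long per QuerySupplyResponse above
})();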
*/ +export interface MsgSendResponseSDKType { +} +export declare const MsgSend: { + encode(message: MsgSend, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSend; + fromPartial(object: DeepPartial): MsgSend; +}; +export declare const MsgSendResponse: { + encode(_: MsgSendResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSendResponse; + fromPartial(_: DeepPartial): MsgSendResponse; +}; diff --git a/packages/codegen/dist/cosmos/nft/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/nft/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..2c60038c --- /dev/null +++ b/packages/codegen/dist/cosmos/nft/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,12 @@ +import { Rpc } from "../../../helpers"; +import { MsgSend, MsgSendResponse } from "./tx"; +/** Msg defines the nft Msg service. */ +export interface Msg { + /** Send defines a method to send a nft from one account to another account. */ + send(request: MsgSend): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + send(request: MsgSend): Promise; +} diff --git a/packages/codegen/dist/cosmos/orm/v1/orm.d.ts b/packages/codegen/dist/cosmos/orm/v1/orm.d.ts new file mode 100644 index 00000000..8a6df721 --- /dev/null +++ b/packages/codegen/dist/cosmos/orm/v1/orm.d.ts @@ -0,0 +1,130 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** TableDescriptor describes an ORM table. */ +export interface TableDescriptor { + /** primary_key defines the primary key for the table. */ + primaryKey?: PrimaryKeyDescriptor; + /** index defines one or more secondary indexes. */ + index: SecondaryIndexDescriptor[]; + /** + * id is a non-zero integer ID that must be unique within the + * tables and singletons in this file. It may be deprecated in the future when this + * can be auto-generated. + */ + id: number; +} +/** TableDescriptor describes an ORM table. */ +export interface TableDescriptorSDKType { + primary_key?: PrimaryKeyDescriptorSDKType; + index: SecondaryIndexDescriptorSDKType[]; + id: number; +} +/** PrimaryKeyDescriptor describes a table primary key. */ +export interface PrimaryKeyDescriptor { + /** + * fields is a comma-separated list of fields in the primary key. Spaces are + * not allowed. Supported field types, their encodings, and any applicable constraints + * are described below. + * - uint32 are encoded as 2,3,4 or 5 bytes using a compact encoding that + * is suitable for sorted iteration (not varint encoding). This type is + * well-suited for small integers. + * - uint64 are encoded as 2,4,6 or 9 bytes using a compact encoding that + * is suitable for sorted iteration (not varint encoding). This type is + * well-suited for small integers such as auto-incrementing sequences. + * - fixed32, fixed64 are encoded as big-endian fixed width bytes and support + * sorted iteration. These types are well-suited for encoding fixed with + * decimals as integers. + * - string's are encoded as raw bytes in terminal key segments and null-terminated + * in non-terminal segments. Null characters are thus forbidden in strings. + * string fields support sorted iteration. + * - bytes are encoded as raw bytes in terminal segments and length-prefixed + * with a 32-bit unsigned varint in non-terminal segments. + * - int32, sint32, int64, sint64, sfixed32, sfixed64 are encoded as fixed width bytes with + * an encoding that enables sorted iteration. 
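// A sketch for the nft Msg service declared above. MsgClientImpl only needs an object
// implementing the Rpc interface from ../helpers; in practice the message is signed and
// broadcast through a signing client, so this only shows how the request message itself is
// built. Addresses and ids are placeholders.
import { MsgSend } from "./cosmos/nft/v1beta1/tx";

const msg = MsgSend.fromPartial({
  classId: "exampleclass",
  id: "token1",
  sender: "cosmos1senderaddress",
  receiver: "cosmos1receiveraddress"
});
// Protobuf wire bytes; MsgClientImpl.send(msg) presumably produces the same encoding internally.
const wireBytes = MsgSend.encode(msg).finish();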
+ * - google.protobuf.Timestamp and google.protobuf.Duration are encoded + * as 12 bytes using an encoding that enables sorted iteration. + * - enum fields are encoded using varint encoding and do not support sorted + * iteration. + * - bool fields are encoded as a single byte 0 or 1. + * + * All other fields types are unsupported in keys including repeated and + * oneof fields. + * + * Primary keys are prefixed by the varint encoded table id and the byte 0x0 + * plus any additional prefix specified by the schema. + */ + fields: string; + /** + * auto_increment specifies that the primary key is generated by an + * auto-incrementing integer. If this is set to true fields must only + * contain one field of that is of type uint64. + */ + autoIncrement: boolean; +} +/** PrimaryKeyDescriptor describes a table primary key. */ +export interface PrimaryKeyDescriptorSDKType { + fields: string; + auto_increment: boolean; +} +/** PrimaryKeyDescriptor describes a table secondary index. */ +export interface SecondaryIndexDescriptor { + /** + * fields is a comma-separated list of fields in the index. The supported + * field types are the same as those for PrimaryKeyDescriptor.fields. + * Index keys are prefixed by the varint encoded table id and the varint + * encoded index id plus any additional prefix specified by the schema. + * + * In addition the the field segments, non-unique index keys are suffixed with + * any additional primary key fields not present in the index fields so that the + * primary key can be reconstructed. Unique indexes instead of being suffixed + * store the remaining primary key fields in the value.. + */ + fields: string; + /** + * id is a non-zero integer ID that must be unique within the indexes for this + * table and less than 32768. It may be deprecated in the future when this can + * be auto-generated. + */ + id: number; + /** unique specifies that this an unique index. */ + unique: boolean; +} +/** PrimaryKeyDescriptor describes a table secondary index. */ +export interface SecondaryIndexDescriptorSDKType { + fields: string; + id: number; + unique: boolean; +} +/** TableDescriptor describes an ORM singleton table which has at most one instance. */ +export interface SingletonDescriptor { + /** + * id is a non-zero integer ID that must be unique within the + * tables and singletons in this file. It may be deprecated in the future when this + * can be auto-generated. + */ + id: number; +} +/** TableDescriptor describes an ORM singleton table which has at most one instance. 
*/ +export interface SingletonDescriptorSDKType { + id: number; +} +export declare const TableDescriptor: { + encode(message: TableDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TableDescriptor; + fromPartial(object: DeepPartial): TableDescriptor; +}; +export declare const PrimaryKeyDescriptor: { + encode(message: PrimaryKeyDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PrimaryKeyDescriptor; + fromPartial(object: DeepPartial): PrimaryKeyDescriptor; +}; +export declare const SecondaryIndexDescriptor: { + encode(message: SecondaryIndexDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SecondaryIndexDescriptor; + fromPartial(object: DeepPartial): SecondaryIndexDescriptor; +}; +export declare const SingletonDescriptor: { + encode(message: SingletonDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SingletonDescriptor; + fromPartial(object: DeepPartial): SingletonDescriptor; +}; diff --git a/packages/codegen/dist/cosmos/orm/v1alpha1/schema.d.ts b/packages/codegen/dist/cosmos/orm/v1alpha1/schema.d.ts new file mode 100644 index 00000000..66027fa8 --- /dev/null +++ b/packages/codegen/dist/cosmos/orm/v1alpha1/schema.d.ts @@ -0,0 +1,98 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** StorageType */ +export declare enum StorageType { + /** + * STORAGE_TYPE_DEFAULT_UNSPECIFIED - STORAGE_TYPE_DEFAULT_UNSPECIFIED indicates the persistent + * KV-storage where primary key entries are stored in merkle-tree + * backed commitment storage and indexes and seqs are stored in + * fast index storage. Note that the Cosmos SDK before store/v2alpha1 + * does not support this. + */ + STORAGE_TYPE_DEFAULT_UNSPECIFIED = 0, + /** + * STORAGE_TYPE_MEMORY - STORAGE_TYPE_MEMORY indicates in-memory storage that will be + * reloaded every time an app restarts. Tables with this type of storage + * will by default be ignored when importing and exporting a module's + * state from JSON. + */ + STORAGE_TYPE_MEMORY = 1, + /** + * STORAGE_TYPE_TRANSIENT - STORAGE_TYPE_TRANSIENT indicates transient storage that is reset + * at the end of every block. Tables with this type of storage + * will by default be ignored when importing and exporting a module's + * state from JSON. + */ + STORAGE_TYPE_TRANSIENT = 2, + /** + * STORAGE_TYPE_INDEX - STORAGE_TYPE_INDEX indicates persistent storage which is not backed + * by a merkle-tree and won't affect the app hash. Note that the Cosmos SDK + * before store/v2alpha1 does not support this. + */ + STORAGE_TYPE_INDEX = 3, + /** + * STORAGE_TYPE_COMMITMENT - STORAGE_TYPE_INDEX indicates persistent storage which is backed by + * a merkle-tree. With this type of storage, both primary and index keys + * will affect the app hash and this is generally less efficient + * than using STORAGE_TYPE_DEFAULT_UNSPECIFIED which separates index + * keys into index storage. Note that modules built with the + * Cosmos SDK before store/v2alpha1 must specify STORAGE_TYPE_COMMITMENT + * instead of STORAGE_TYPE_DEFAULT_UNSPECIFIED or STORAGE_TYPE_INDEX + * because this is the only type of persistent storage available. 
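// An illustration of the ORM descriptor constraints documented above. The field names
// ("id", "owner") refer to a hypothetical table message, not to anything in this package.
import { TableDescriptor } from "./cosmos/orm/v1/orm";

const table = TableDescriptor.fromPartial({
  id: 1, // non-zero and unique among the tables/singletons in the file
  primaryKey: {
    fields: "id",       // with auto_increment set, exactly one uint64 field is allowed
    autoIncrement: true
  },
  index: [
    { id: 1, fields: "owner", unique: false } // index id must be non-zero and < 32768
  ]
});
const descriptorBytes = TableDescriptor.encode(table).finish();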
+ */ + STORAGE_TYPE_COMMITMENT = 4, + UNRECOGNIZED = -1 +} +export declare const StorageTypeSDKType: typeof StorageType; +export declare function storageTypeFromJSON(object: any): StorageType; +export declare function storageTypeToJSON(object: StorageType): string; +/** ModuleSchemaDescriptor describe's a module's ORM schema. */ +export interface ModuleSchemaDescriptor { + schemaFile: ModuleSchemaDescriptor_FileEntry[]; + /** + * prefix is an optional prefix that precedes all keys in this module's + * store. + */ + prefix: Uint8Array; +} +/** ModuleSchemaDescriptor describe's a module's ORM schema. */ +export interface ModuleSchemaDescriptorSDKType { + schema_file: ModuleSchemaDescriptor_FileEntrySDKType[]; + prefix: Uint8Array; +} +/** FileEntry describes an ORM file used in a module. */ +export interface ModuleSchemaDescriptor_FileEntry { + /** + * id is a prefix that will be varint encoded and prepended to all the + * table keys specified in the file's tables. + */ + id: number; + /** + * proto_file_name is the name of a file .proto in that contains + * table definitions. The .proto file must be in a package that the + * module has referenced using cosmos.app.v1.ModuleDescriptor.use_package. + */ + protoFileName: string; + /** + * storage_type optionally indicates the type of storage this file's + * tables should used. If it is left unspecified, the default KV-storage + * of the app will be used. + */ + storageType: StorageType; +} +/** FileEntry describes an ORM file used in a module. */ +export interface ModuleSchemaDescriptor_FileEntrySDKType { + id: number; + proto_file_name: string; + storage_type: StorageType; +} +export declare const ModuleSchemaDescriptor: { + encode(message: ModuleSchemaDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleSchemaDescriptor; + fromPartial(object: DeepPartial): ModuleSchemaDescriptor; +}; +export declare const ModuleSchemaDescriptor_FileEntry: { + encode(message: ModuleSchemaDescriptor_FileEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleSchemaDescriptor_FileEntry; + fromPartial(object: DeepPartial): ModuleSchemaDescriptor_FileEntry; +}; diff --git a/packages/codegen/dist/cosmos/params/v1beta1/params.d.ts b/packages/codegen/dist/cosmos/params/v1beta1/params.d.ts new file mode 100644 index 00000000..44a12b09 --- /dev/null +++ b/packages/codegen/dist/cosmos/params/v1beta1/params.d.ts @@ -0,0 +1,42 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** ParameterChangeProposal defines a proposal to change one or more parameters. */ +export interface ParameterChangeProposal { + title: string; + description: string; + changes: ParamChange[]; +} +/** ParameterChangeProposal defines a proposal to change one or more parameters. */ +export interface ParameterChangeProposalSDKType { + title: string; + description: string; + changes: ParamChangeSDKType[]; +} +/** + * ParamChange defines an individual parameter change, for use in + * ParameterChangeProposal. + */ +export interface ParamChange { + subspace: string; + key: string; + value: string; +} +/** + * ParamChange defines an individual parameter change, for use in + * ParameterChangeProposal. 
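// A sketch of a ModuleSchemaDescriptor as described above. The proto file name is hypothetical;
// the StorageType member is the one declared in this file.
import { ModuleSchemaDescriptor, StorageType } from "./cosmos/orm/v1alpha1/schema";

const schema = ModuleSchemaDescriptor.fromPartial({
  schemaFile: [
    {
      id: 1,                                      // varint prefix for this file's table keys
      protoFileName: "examplemodule/state.proto", // hypothetical .proto file
      storageType: StorageType.STORAGE_TYPE_DEFAULT_UNSPECIFIED
    }
  ],
  prefix: new Uint8Array()                        // optional module-store prefix
});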
+ */ +export interface ParamChangeSDKType { + subspace: string; + key: string; + value: string; +} +export declare const ParameterChangeProposal: { + encode(message: ParameterChangeProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ParameterChangeProposal; + fromPartial(object: DeepPartial): ParameterChangeProposal; +}; +export declare const ParamChange: { + encode(message: ParamChange, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ParamChange; + fromPartial(object: DeepPartial): ParamChange; +}; diff --git a/packages/codegen/dist/cosmos/params/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/params/v1beta1/query.d.ts new file mode 100644 index 00000000..6cf07c14 --- /dev/null +++ b/packages/codegen/dist/cosmos/params/v1beta1/query.d.ts @@ -0,0 +1,91 @@ +import { ParamChange, ParamChangeSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { + /** subspace defines the module to query the parameter for. */ + subspace: string; + /** key defines the key of the parameter in the subspace. */ + key: string; +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { + subspace: string; + key: string; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** param defines the queried parameter. */ + param?: ParamChange; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + param?: ParamChangeSDKType; +} +/** + * QuerySubspacesRequest defines a request type for querying for all registered + * subspaces and all keys for a subspace. + */ +export interface QuerySubspacesRequest { +} +/** + * QuerySubspacesRequest defines a request type for querying for all registered + * subspaces and all keys for a subspace. + */ +export interface QuerySubspacesRequestSDKType { +} +/** + * QuerySubspacesResponse defines the response types for querying for all + * registered subspaces and all keys for a subspace. + */ +export interface QuerySubspacesResponse { + subspaces: Subspace[]; +} +/** + * QuerySubspacesResponse defines the response types for querying for all + * registered subspaces and all keys for a subspace. + */ +export interface QuerySubspacesResponseSDKType { + subspaces: SubspaceSDKType[]; +} +/** + * Subspace defines a parameter subspace name and all the keys that exist for + * the subspace. + */ +export interface Subspace { + subspace: string; + keys: string[]; +} +/** + * Subspace defines a parameter subspace name and all the keys that exist for + * the subspace. 
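// A sketch of building a ParameterChangeProposal from the types above; the subspace/key/value
// triple is a made-up illustration, not a real module parameter.
import { ParameterChangeProposal } from "./cosmos/params/v1beta1/params";

const proposal = ParameterChangeProposal.fromPartial({
  title: "Example parameter change",
  description: "Adjusts one parameter of a hypothetical 'examplemodule' subspace",
  changes: [{ subspace: "examplemodule", key: "SomeKey", value: "\"newvalue\"" }]
});
const proposalBytes = ParameterChangeProposal.encode(proposal).finish();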
+ */ +export interface SubspaceSDKType { + subspace: string; + keys: string[]; +} +export declare const QueryParamsRequest: { + encode(message: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(object: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QuerySubspacesRequest: { + encode(_: QuerySubspacesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesRequest; + fromPartial(_: DeepPartial): QuerySubspacesRequest; +}; +export declare const QuerySubspacesResponse: { + encode(message: QuerySubspacesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesResponse; + fromPartial(object: DeepPartial): QuerySubspacesResponse; +}; +export declare const Subspace: { + encode(message: Subspace, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Subspace; + fromPartial(object: DeepPartial): Subspace; +}; diff --git a/packages/codegen/dist/cosmos/params/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/params/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..4d76132d --- /dev/null +++ b/packages/codegen/dist/cosmos/params/v1beta1/query.lcd.d.ts @@ -0,0 +1,10 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QuerySubspacesRequest, QuerySubspacesResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + params(params: QueryParamsRequest): Promise; + subspaces(_params?: QuerySubspacesRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/params/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/params/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..f9fe0603 --- /dev/null +++ b/packages/codegen/dist/cosmos/params/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,23 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QuerySubspacesRequest, QuerySubspacesResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** + * Params queries a specific parameter of a module, given its subspace and + * key. + */ + params(request: QueryParamsRequest): Promise; + /** Subspaces queries for all registered subspaces and all keys for a subspace. 
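// A sketch for the params LCDQueryClient just above. Unlike most query clients in this diff,
// params() takes a required request (subspace + key) while subspaces() does not. The LCDClient
// construction and the subspace/key values are assumptions/placeholders.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient as ParamsLCDQueryClient } from "./cosmos/params/v1beta1/query.lcd";

const paramsClient = new ParamsLCDQueryClient({
  requestClient: new LCDClient({ restEndpoint: "http://localhost:1317" }) // placeholder endpoint
});

(async () => {
  const single = await paramsClient.params({ subspace: "examplemodule", key: "SomeKey" });
  const all = await paramsClient.subspaces();
  console.log(single, all);
})();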
*/ + subspaces(request?: QuerySubspacesRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + params(request: QueryParamsRequest): Promise; + subspaces(request?: QuerySubspacesRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + params(request: QueryParamsRequest): Promise; + subspaces(request?: QuerySubspacesRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/rpc.query.d.ts b/packages/codegen/dist/cosmos/rpc.query.d.ts new file mode 100644 index 00000000..69bbeacd --- /dev/null +++ b/packages/codegen/dist/cosmos/rpc.query.d.ts @@ -0,0 +1,187 @@ +import { HttpEndpoint } from "@cosmjs/tendermint-rpc"; +export declare const createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | HttpEndpoint; +}) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: import("./app/v1alpha1/query").QueryConfigRequest): Promise; + }; + }; + auth: { + v1beta1: { + accounts(request?: import("./auth/v1beta1/query").QueryAccountsRequest): Promise; + account(request: import("./auth/v1beta1/query").QueryAccountRequest): Promise; + params(request?: import("./auth/v1beta1/query").QueryParamsRequest): Promise; + moduleAccounts(request?: import("./auth/v1beta1/query").QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: import("./auth/v1beta1/query").Bech32PrefixRequest): Promise; + addressBytesToString(request: import("./auth/v1beta1/query").AddressBytesToStringRequest): Promise; + addressStringToBytes(request: import("./auth/v1beta1/query").AddressStringToBytesRequest): Promise; + }; + }; + authz: { + v1beta1: { + grants(request: import("./authz/v1beta1/query").QueryGrantsRequest): Promise; + granterGrants(request: import("./authz/v1beta1/query").QueryGranterGrantsRequest): Promise; + granteeGrants(request: import("./authz/v1beta1/query").QueryGranteeGrantsRequest): Promise; + }; + }; + bank: { + v1beta1: { + balance(request: import("./bank/v1beta1/query").QueryBalanceRequest): Promise; + allBalances(request: import("./bank/v1beta1/query").QueryAllBalancesRequest): Promise; + spendableBalances(request: import("./bank/v1beta1/query").QuerySpendableBalancesRequest): Promise; + totalSupply(request?: import("./bank/v1beta1/query").QueryTotalSupplyRequest): Promise; + supplyOf(request: import("./bank/v1beta1/query").QuerySupplyOfRequest): Promise; + params(request?: import("./bank/v1beta1/query").QueryParamsRequest): Promise; + denomMetadata(request: import("./bank/v1beta1/query").QueryDenomMetadataRequest): Promise; + denomsMetadata(request?: import("./bank/v1beta1/query").QueryDenomsMetadataRequest): Promise; + denomOwners(request: import("./bank/v1beta1/query").QueryDenomOwnersRequest): Promise; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: import("./base/tendermint/v1beta1/query").GetNodeInfoRequest): Promise; + getSyncing(request?: import("./base/tendermint/v1beta1/query").GetSyncingRequest): Promise; + getLatestBlock(request?: import("./base/tendermint/v1beta1/query").GetLatestBlockRequest): Promise; + getBlockByHeight(request: import("./base/tendermint/v1beta1/query").GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: import("./base/tendermint/v1beta1/query").GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: import("./base/tendermint/v1beta1/query").GetValidatorSetByHeightRequest): Promise; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: 
import("./distribution/v1beta1/query").QueryParamsRequest): Promise; + validatorOutstandingRewards(request: import("./distribution/v1beta1/query").QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: import("./distribution/v1beta1/query").QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: import("./distribution/v1beta1/query").QueryValidatorSlashesRequest): Promise; + delegationRewards(request: import("./distribution/v1beta1/query").QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: import("./distribution/v1beta1/query").QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: import("./distribution/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: import("./distribution/v1beta1/query").QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: import("./distribution/v1beta1/query").QueryCommunityPoolRequest): Promise; + }; + }; + evidence: { + v1beta1: { + evidence(request: import("./evidence/v1beta1/query").QueryEvidenceRequest): Promise; + allEvidence(request?: import("./evidence/v1beta1/query").QueryAllEvidenceRequest): Promise; + }; + }; + feegrant: { + v1beta1: { + allowance(request: import("./feegrant/v1beta1/query").QueryAllowanceRequest): Promise; + allowances(request: import("./feegrant/v1beta1/query").QueryAllowancesRequest): Promise; + allowancesByGranter(request: import("./feegrant/v1beta1/query").QueryAllowancesByGranterRequest): Promise; + }; + }; + gov: { + v1: { + proposal(request: import("./gov/v1/query").QueryProposalRequest): Promise; + proposals(request: import("./gov/v1/query").QueryProposalsRequest): Promise; + vote(request: import("./gov/v1/query").QueryVoteRequest): Promise; + votes(request: import("./gov/v1/query").QueryVotesRequest): Promise; + params(request: import("./gov/v1/query").QueryParamsRequest): Promise; + deposit(request: import("./gov/v1/query").QueryDepositRequest): Promise; + deposits(request: import("./gov/v1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("./gov/v1/query").QueryTallyResultRequest): Promise; + }; + v1beta1: { + proposal(request: import("./gov/v1beta1/query").QueryProposalRequest): Promise; + proposals(request: import("./gov/v1beta1/query").QueryProposalsRequest): Promise; + vote(request: import("./gov/v1beta1/query").QueryVoteRequest): Promise; + votes(request: import("./gov/v1beta1/query").QueryVotesRequest): Promise; + params(request: import("./gov/v1beta1/query").QueryParamsRequest): Promise; + deposit(request: import("./gov/v1beta1/query").QueryDepositRequest): Promise; + deposits(request: import("./gov/v1beta1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("./gov/v1beta1/query").QueryTallyResultRequest): Promise; + }; + }; + group: { + v1: { + groupInfo(request: import("./group/v1/query").QueryGroupInfoRequest): Promise; + groupPolicyInfo(request: import("./group/v1/query").QueryGroupPolicyInfoRequest): Promise; + groupMembers(request: import("./group/v1/query").QueryGroupMembersRequest): Promise; + groupsByAdmin(request: import("./group/v1/query").QueryGroupsByAdminRequest): Promise; + groupPoliciesByGroup(request: import("./group/v1/query").QueryGroupPoliciesByGroupRequest): Promise; + groupPoliciesByAdmin(request: import("./group/v1/query").QueryGroupPoliciesByAdminRequest): Promise; + proposal(request: import("./group/v1/query").QueryProposalRequest): Promise; + proposalsByGroupPolicy(request: 
import("./group/v1/query").QueryProposalsByGroupPolicyRequest): Promise; + voteByProposalVoter(request: import("./group/v1/query").QueryVoteByProposalVoterRequest): Promise; + votesByProposal(request: import("./group/v1/query").QueryVotesByProposalRequest): Promise; + votesByVoter(request: import("./group/v1/query").QueryVotesByVoterRequest): Promise; + groupsByMember(request: import("./group/v1/query").QueryGroupsByMemberRequest): Promise; + tallyResult(request: import("./group/v1/query").QueryTallyResultRequest): Promise; + }; + }; + mint: { + v1beta1: { + params(request?: import("./mint/v1beta1/query").QueryParamsRequest): Promise; + inflation(request?: import("./mint/v1beta1/query").QueryInflationRequest): Promise; + annualProvisions(request?: import("./mint/v1beta1/query").QueryAnnualProvisionsRequest): Promise; + }; + }; + nft: { + v1beta1: { + balance(request: import("./nft/v1beta1/query").QueryBalanceRequest): Promise; + owner(request: import("./nft/v1beta1/query").QueryOwnerRequest): Promise; + supply(request: import("./nft/v1beta1/query").QuerySupplyRequest): Promise; + nFTs(request: import("./nft/v1beta1/query").QueryNFTsRequest): Promise; + nFT(request: import("./nft/v1beta1/query").QueryNFTRequest): Promise; + class(request: import("./nft/v1beta1/query").QueryClassRequest): Promise; + classes(request?: import("./nft/v1beta1/query").QueryClassesRequest): Promise; + }; + }; + params: { + v1beta1: { + params(request: import("./params/v1beta1/query").QueryParamsRequest): Promise; + subspaces(request?: import("./params/v1beta1/query").QuerySubspacesRequest): Promise; + }; + }; + slashing: { + v1beta1: { + params(request?: import("./slashing/v1beta1/query").QueryParamsRequest): Promise; + signingInfo(request: import("./slashing/v1beta1/query").QuerySigningInfoRequest): Promise; + signingInfos(request?: import("./slashing/v1beta1/query").QuerySigningInfosRequest): Promise; + }; + }; + staking: { + v1beta1: { + validators(request: import("./staking/v1beta1/query").QueryValidatorsRequest): Promise; + validator(request: import("./staking/v1beta1/query").QueryValidatorRequest): Promise; + validatorDelegations(request: import("./staking/v1beta1/query").QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: import("./staking/v1beta1/query").QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: import("./staking/v1beta1/query").QueryDelegationRequest): Promise; + unbondingDelegation(request: import("./staking/v1beta1/query").QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: import("./staking/v1beta1/query").QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: import("./staking/v1beta1/query").QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: import("./staking/v1beta1/query").QueryRedelegationsRequest): Promise; + delegatorValidators(request: import("./staking/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: import("./staking/v1beta1/query").QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: import("./staking/v1beta1/query").QueryHistoricalInfoRequest): Promise; + pool(request?: import("./staking/v1beta1/query").QueryPoolRequest): Promise; + params(request?: import("./staking/v1beta1/query").QueryParamsRequest): Promise; + }; + }; + tx: { + v1beta1: { + simulate(request: import("./tx/v1beta1/service").SimulateRequest): Promise; + getTx(request: import("./tx/v1beta1/service").GetTxRequest): Promise; + 
broadcastTx(request: import("./tx/v1beta1/service").BroadcastTxRequest): Promise; + getTxsEvent(request: import("./tx/v1beta1/service").GetTxsEventRequest): Promise; + getBlockWithTxs(request: import("./tx/v1beta1/service").GetBlockWithTxsRequest): Promise; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: import("./upgrade/v1beta1/query").QueryCurrentPlanRequest): Promise; + appliedPlan(request: import("./upgrade/v1beta1/query").QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: import("./upgrade/v1beta1/query").QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: import("./upgrade/v1beta1/query").QueryModuleVersionsRequest): Promise; + authority(request?: import("./upgrade/v1beta1/query").QueryAuthorityRequest): Promise; + }; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmos/rpc.tx.d.ts b/packages/codegen/dist/cosmos/rpc.tx.d.ts new file mode 100644 index 00000000..4e07ea0a --- /dev/null +++ b/packages/codegen/dist/cosmos/rpc.tx.d.ts @@ -0,0 +1,47 @@ +import { Rpc } from "../helpers"; +export declare const createRPCMsgClient: ({ rpc }: { + rpc: Rpc; +}) => Promise<{ + cosmos: { + authz: { + v1beta1: import("./authz/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + bank: { + v1beta1: import("./bank/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + crisis: { + v1beta1: import("./crisis/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + distribution: { + v1beta1: import("./distribution/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + evidence: { + v1beta1: import("./evidence/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + feegrant: { + v1beta1: import("./feegrant/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + gov: { + v1: import("./gov/v1/tx.rpc.msg").MsgClientImpl; + v1beta1: import("./gov/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + group: { + v1: import("./group/v1/tx.rpc.msg").MsgClientImpl; + }; + nft: { + v1beta1: import("./nft/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + slashing: { + v1beta1: import("./slashing/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + staking: { + v1beta1: import("./staking/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + upgrade: { + v1beta1: import("./upgrade/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + vesting: { + v1beta1: import("./vesting/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/genesis.d.ts new file mode 100644 index 00000000..11781a4c --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/genesis.d.ts @@ -0,0 +1,87 @@ +/// +import { Params, ParamsSDKType, ValidatorSigningInfo, ValidatorSigningInfoSDKType } from "./slashing"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState defines the slashing module's genesis state. */ +export interface GenesisState { + /** params defines all the paramaters of related to deposit. */ + params?: Params; + /** + * signing_infos represents a map between validator addresses and their + * signing infos. + */ + signingInfos: SigningInfo[]; + /** + * missed_blocks represents a map between validator addresses and their + * missed blocks. + */ + missedBlocks: ValidatorMissedBlocks[]; +} +/** GenesisState defines the slashing module's genesis state. */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + signing_infos: SigningInfoSDKType[]; + missed_blocks: ValidatorMissedBlocksSDKType[]; +} +/** SigningInfo stores validator signing info of corresponding address. 
*/ +export interface SigningInfo { + /** address is the validator address. */ + address: string; + /** validator_signing_info represents the signing info of this validator. */ + validatorSigningInfo?: ValidatorSigningInfo; +} +/** SigningInfo stores validator signing info of corresponding address. */ +export interface SigningInfoSDKType { + address: string; + validator_signing_info?: ValidatorSigningInfoSDKType; +} +/** + * ValidatorMissedBlocks contains array of missed blocks of corresponding + * address. + */ +export interface ValidatorMissedBlocks { + /** address is the validator address. */ + address: string; + /** missed_blocks is an array of missed blocks by the validator. */ + missedBlocks: MissedBlock[]; +} +/** + * ValidatorMissedBlocks contains array of missed blocks of corresponding + * address. + */ +export interface ValidatorMissedBlocksSDKType { + address: string; + missed_blocks: MissedBlockSDKType[]; +} +/** MissedBlock contains height and missed status as boolean. */ +export interface MissedBlock { + /** index is the height at which the block was missed. */ + index: Long; + /** missed is the missed status. */ + missed: boolean; +} +/** MissedBlock contains height and missed status as boolean. */ +export interface MissedBlockSDKType { + index: Long; + missed: boolean; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const SigningInfo: { + encode(message: SigningInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SigningInfo; + fromPartial(object: DeepPartial): SigningInfo; +}; +export declare const ValidatorMissedBlocks: { + encode(message: ValidatorMissedBlocks, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorMissedBlocks; + fromPartial(object: DeepPartial): ValidatorMissedBlocks; +}; +export declare const MissedBlock: { + encode(message: MissedBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MissedBlock; + fromPartial(object: DeepPartial): MissedBlock; +}; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/query.d.ts new file mode 100644 index 00000000..f23f854f --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/query.d.ts @@ -0,0 +1,109 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Params, ParamsSDKType, ValidatorSigningInfo, ValidatorSigningInfoSDKType } from "./slashing"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method */ +export interface QueryParamsResponse { + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC + * method + */ +export interface QuerySigningInfoRequest { + /** 
cons_address is the address to query signing info of */ + consAddress: string; +} +/** + * QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC + * method + */ +export interface QuerySigningInfoRequestSDKType { + cons_address: string; +} +/** + * QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC + * method + */ +export interface QuerySigningInfoResponse { + /** val_signing_info is the signing info of requested val cons address */ + valSigningInfo?: ValidatorSigningInfo; +} +/** + * QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC + * method + */ +export interface QuerySigningInfoResponseSDKType { + val_signing_info?: ValidatorSigningInfoSDKType; +} +/** + * QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC + * method + */ +export interface QuerySigningInfosRequest { + pagination?: PageRequest; +} +/** + * QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC + * method + */ +export interface QuerySigningInfosRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QuerySigningInfosResponse is the response type for the Query/SigningInfos RPC + * method + */ +export interface QuerySigningInfosResponse { + /** info is the signing info of all validators */ + info: ValidatorSigningInfo[]; + pagination?: PageResponse; +} +/** + * QuerySigningInfosResponse is the response type for the Query/SigningInfos RPC + * method + */ +export interface QuerySigningInfosResponseSDKType { + info: ValidatorSigningInfoSDKType[]; + pagination?: PageResponseSDKType; +} +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export declare const QuerySigningInfoRequest: { + encode(message: QuerySigningInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfoRequest; + fromPartial(object: DeepPartial): QuerySigningInfoRequest; +}; +export declare const QuerySigningInfoResponse: { + encode(message: QuerySigningInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfoResponse; + fromPartial(object: DeepPartial): QuerySigningInfoResponse; +}; +export declare const QuerySigningInfosRequest: { + encode(message: QuerySigningInfosRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfosRequest; + fromPartial(object: DeepPartial): QuerySigningInfosRequest; +}; +export declare const QuerySigningInfosResponse: { + encode(message: QuerySigningInfosResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfosResponse; + fromPartial(object: DeepPartial): QuerySigningInfosResponse; +}; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..68fa91bb --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/query.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { 
QueryParamsRequest, QueryParamsResponseSDKType, QuerySigningInfoRequest, QuerySigningInfoResponseSDKType, QuerySigningInfosRequest, QuerySigningInfosResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + params(_params?: QueryParamsRequest): Promise<QueryParamsResponseSDKType>; + signingInfo(params: QuerySigningInfoRequest): Promise<QuerySigningInfoResponseSDKType>; + signingInfos(params?: QuerySigningInfosRequest): Promise<QuerySigningInfosResponseSDKType>; +} diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..ff339175 --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QuerySigningInfoRequest, QuerySigningInfoResponse, QuerySigningInfosRequest, QuerySigningInfosResponse } from "./query"; +/** Query defines the gRPC querier service */ +export interface Query { + /** Params queries the parameters of the slashing module */ + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + /** SigningInfo queries the signing info of given cons address */ + signingInfo(request: QuerySigningInfoRequest): Promise<QuerySigningInfoResponse>; + /** SigningInfos queries signing info of all validators */ + signingInfos(request?: QuerySigningInfosRequest): Promise<QuerySigningInfosResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + signingInfo(request: QuerySigningInfoRequest): Promise<QuerySigningInfoResponse>; + signingInfos(request?: QuerySigningInfosRequest): Promise<QuerySigningInfosResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + signingInfo(request: QuerySigningInfoRequest): Promise<QuerySigningInfoResponse>; + signingInfos(request?: QuerySigningInfosRequest): Promise<QuerySigningInfosResponse>; +}; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/slashing.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/slashing.d.ts new file mode 100644 index 00000000..b7dd7ad3 --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/slashing.d.ts @@ -0,0 +1,69 @@ +/// +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * ValidatorSigningInfo defines a validator's signing info for monitoring their + * liveness activity. + */ +export interface ValidatorSigningInfo { + address: string; + /** Height at which validator was first a candidate OR was unjailed */ + startHeight: Long; + /** + * Index which is incremented each time the validator was bonded + * in a block and may have signed a precommit or not. This in conjunction with the + * `SignedBlocksWindow` param determines the index in the `MissedBlocksBitArray`. + */ + indexOffset: Long; + /** Timestamp until which the validator is jailed due to liveness downtime. */ + jailedUntil?: Date; + /** + * Whether or not a validator has been tombstoned (killed out of validator set). It is set + * once the validator commits an equivocation or for any other configured misbehavior. + */ + tombstoned: boolean; + /** + * A counter kept to avoid unnecessary array reads. + * Note that `Sum(MissedBlocksBitArray)` always equals `MissedBlocksCounter`. 
+ */ + missedBlocksCounter: Long; +} +/** + * ValidatorSigningInfo defines a validator's signing info for monitoring their + * liveness activity. + */ +export interface ValidatorSigningInfoSDKType { + address: string; + start_height: Long; + index_offset: Long; + jailed_until?: Date; + tombstoned: boolean; + missed_blocks_counter: Long; +} +/** Params represents the parameters used by the slashing module. */ +export interface Params { + signedBlocksWindow: Long; + minSignedPerWindow: Uint8Array; + downtimeJailDuration?: Duration; + slashFractionDoubleSign: Uint8Array; + slashFractionDowntime: Uint8Array; +} +/** Params represents the parameters used by the slashing module. */ +export interface ParamsSDKType { + signed_blocks_window: Long; + min_signed_per_window: Uint8Array; + downtime_jail_duration?: DurationSDKType; + slash_fraction_double_sign: Uint8Array; + slash_fraction_downtime: Uint8Array; +} +export declare const ValidatorSigningInfo: { + encode(message: ValidatorSigningInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSigningInfo; + fromPartial(object: DeepPartial<ValidatorSigningInfo>): ValidatorSigningInfo; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial<Params>): Params; +}; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/tx.d.ts new file mode 100644 index 00000000..fe6632c6 --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/tx.d.ts @@ -0,0 +1,26 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgUnjail defines the Msg/Unjail request type */ +export interface MsgUnjail { + validatorAddr: string; +} +/** MsgUnjail defines the Msg/Unjail request type */ +export interface MsgUnjailSDKType { + validator_addr: string; +} +/** MsgUnjailResponse defines the Msg/Unjail response type */ +export interface MsgUnjailResponse { +} +/** MsgUnjailResponse defines the Msg/Unjail response type */ +export interface MsgUnjailResponseSDKType { +} +export declare const MsgUnjail: { + encode(message: MsgUnjail, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnjail; + fromPartial(object: DeepPartial<MsgUnjail>): MsgUnjail; +}; +export declare const MsgUnjailResponse: { + encode(_: MsgUnjailResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnjailResponse; + fromPartial(_: DeepPartial<MsgUnjailResponse>): MsgUnjailResponse; +}; diff --git a/packages/codegen/dist/cosmos/slashing/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/slashing/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..706396b1 --- /dev/null +++ b/packages/codegen/dist/cosmos/slashing/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,16 @@ +import { Rpc } from "../../../helpers"; +import { MsgUnjail, MsgUnjailResponse } from "./tx"; +/** Msg defines the slashing Msg service. */ +export interface Msg { + /** + * Unjail defines a method for unjailing a jailed validator, thus returning + * them into the bonded validator set, so they can begin receiving provisions + * and rewards again. 
+ */ + unjail(request: MsgUnjail): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + unjail(request: MsgUnjail): Promise; +} diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/authz.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/authz.d.ts new file mode 100644 index 00000000..a5baac2d --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/authz.d.ts @@ -0,0 +1,72 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * AuthorizationType defines the type of staking module authorization type + * + * Since: cosmos-sdk 0.43 + */ +export declare enum AuthorizationType { + /** AUTHORIZATION_TYPE_UNSPECIFIED - AUTHORIZATION_TYPE_UNSPECIFIED specifies an unknown authorization type */ + AUTHORIZATION_TYPE_UNSPECIFIED = 0, + /** AUTHORIZATION_TYPE_DELEGATE - AUTHORIZATION_TYPE_DELEGATE defines an authorization type for Msg/Delegate */ + AUTHORIZATION_TYPE_DELEGATE = 1, + /** AUTHORIZATION_TYPE_UNDELEGATE - AUTHORIZATION_TYPE_UNDELEGATE defines an authorization type for Msg/Undelegate */ + AUTHORIZATION_TYPE_UNDELEGATE = 2, + /** AUTHORIZATION_TYPE_REDELEGATE - AUTHORIZATION_TYPE_REDELEGATE defines an authorization type for Msg/BeginRedelegate */ + AUTHORIZATION_TYPE_REDELEGATE = 3, + UNRECOGNIZED = -1 +} +export declare const AuthorizationTypeSDKType: typeof AuthorizationType; +export declare function authorizationTypeFromJSON(object: any): AuthorizationType; +export declare function authorizationTypeToJSON(object: AuthorizationType): string; +/** + * StakeAuthorization defines authorization for delegate/undelegate/redelegate. + * + * Since: cosmos-sdk 0.43 + */ +export interface StakeAuthorization { + /** + * max_tokens specifies the maximum amount of tokens can be delegate to a validator. If it is + * empty, there is no spend limit and any amount of coins can be delegated. + */ + maxTokens?: Coin; + /** + * allow_list specifies list of validator addresses to whom grantee can delegate tokens on behalf of granter's + * account. + */ + allowList?: StakeAuthorization_Validators; + /** deny_list specifies list of validator addresses to whom grantee can not delegate tokens. */ + denyList?: StakeAuthorization_Validators; + /** authorization_type defines one of AuthorizationType. */ + authorizationType: AuthorizationType; +} +/** + * StakeAuthorization defines authorization for delegate/undelegate/redelegate. + * + * Since: cosmos-sdk 0.43 + */ +export interface StakeAuthorizationSDKType { + max_tokens?: CoinSDKType; + allow_list?: StakeAuthorization_ValidatorsSDKType; + deny_list?: StakeAuthorization_ValidatorsSDKType; + authorization_type: AuthorizationType; +} +/** Validators defines list of validator addresses. */ +export interface StakeAuthorization_Validators { + address: string[]; +} +/** Validators defines list of validator addresses. 
*/ +export interface StakeAuthorization_ValidatorsSDKType { + address: string[]; +} +export declare const StakeAuthorization: { + encode(message: StakeAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StakeAuthorization; + fromPartial(object: DeepPartial): StakeAuthorization; +}; +export declare const StakeAuthorization_Validators: { + encode(message: StakeAuthorization_Validators, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StakeAuthorization_Validators; + fromPartial(object: DeepPartial): StakeAuthorization_Validators; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/genesis.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/genesis.d.ts new file mode 100644 index 00000000..975dbd15 --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/genesis.d.ts @@ -0,0 +1,61 @@ +/// +import { Params, ParamsSDKType, Validator, ValidatorSDKType, Delegation, DelegationSDKType, UnbondingDelegation, UnbondingDelegationSDKType, Redelegation, RedelegationSDKType } from "./staking"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState defines the staking module's genesis state. */ +export interface GenesisState { + /** params defines all the paramaters of related to deposit. */ + params?: Params; + /** + * last_total_power tracks the total amounts of bonded tokens recorded during + * the previous end block. + */ + lastTotalPower: Uint8Array; + /** + * last_validator_powers is a special index that provides a historical list + * of the last-block's bonded validators. + */ + lastValidatorPowers: LastValidatorPower[]; + /** delegations defines the validator set at genesis. */ + validators: Validator[]; + /** delegations defines the delegations active at genesis. */ + delegations: Delegation[]; + /** unbonding_delegations defines the unbonding delegations active at genesis. */ + unbondingDelegations: UnbondingDelegation[]; + /** redelegations defines the redelegations active at genesis. */ + redelegations: Redelegation[]; + exported: boolean; +} +/** GenesisState defines the staking module's genesis state. */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + last_total_power: Uint8Array; + last_validator_powers: LastValidatorPowerSDKType[]; + validators: ValidatorSDKType[]; + delegations: DelegationSDKType[]; + unbonding_delegations: UnbondingDelegationSDKType[]; + redelegations: RedelegationSDKType[]; + exported: boolean; +} +/** LastValidatorPower required for validator set update logic. */ +export interface LastValidatorPower { + /** address is the address of the validator. */ + address: string; + /** power defines the power of the validator. */ + power: Long; +} +/** LastValidatorPower required for validator set update logic. 
*/ +export interface LastValidatorPowerSDKType { + address: string; + power: Long; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const LastValidatorPower: { + encode(message: LastValidatorPower, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LastValidatorPower; + fromPartial(object: DeepPartial): LastValidatorPower; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/query.d.ts new file mode 100644 index 00000000..e7124182 --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/query.d.ts @@ -0,0 +1,552 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Validator, ValidatorSDKType, DelegationResponse, DelegationResponseSDKType, UnbondingDelegation, UnbondingDelegationSDKType, RedelegationResponse, RedelegationResponseSDKType, HistoricalInfo, HistoricalInfoSDKType, Pool, PoolSDKType, Params, ParamsSDKType } from "./staking"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryValidatorsRequest is request type for Query/Validators RPC method. */ +export interface QueryValidatorsRequest { + /** status enables to query for validators matching a given status. */ + status: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryValidatorsRequest is request type for Query/Validators RPC method. */ +export interface QueryValidatorsRequestSDKType { + status: string; + pagination?: PageRequestSDKType; +} +/** QueryValidatorsResponse is response type for the Query/Validators RPC method */ +export interface QueryValidatorsResponse { + /** validators contains all the queried validators. */ + validators: Validator[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryValidatorsResponse is response type for the Query/Validators RPC method */ +export interface QueryValidatorsResponseSDKType { + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryValidatorRequest is response type for the Query/Validator RPC method */ +export interface QueryValidatorRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; +} +/** QueryValidatorRequest is response type for the Query/Validator RPC method */ +export interface QueryValidatorRequestSDKType { + validator_addr: string; +} +/** QueryValidatorResponse is response type for the Query/Validator RPC method */ +export interface QueryValidatorResponse { + /** validator defines the the validator info. */ + validator?: Validator; +} +/** QueryValidatorResponse is response type for the Query/Validator RPC method */ +export interface QueryValidatorResponseSDKType { + validator?: ValidatorSDKType; +} +/** + * QueryValidatorDelegationsRequest is request type for the + * Query/ValidatorDelegations RPC method + */ +export interface QueryValidatorDelegationsRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; + /** pagination defines an optional pagination for the request. 
*/ + pagination?: PageRequest; +} +/** + * QueryValidatorDelegationsRequest is request type for the + * Query/ValidatorDelegations RPC method + */ +export interface QueryValidatorDelegationsRequestSDKType { + validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorDelegationsResponse is response type for the + * Query/ValidatorDelegations RPC method + */ +export interface QueryValidatorDelegationsResponse { + delegationResponses: DelegationResponse[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryValidatorDelegationsResponse is response type for the + * Query/ValidatorDelegations RPC method + */ +export interface QueryValidatorDelegationsResponseSDKType { + delegation_responses: DelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryValidatorUnbondingDelegationsRequest is required type for the + * Query/ValidatorUnbondingDelegations RPC method + */ +export interface QueryValidatorUnbondingDelegationsRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryValidatorUnbondingDelegationsRequest is required type for the + * Query/ValidatorUnbondingDelegations RPC method + */ +export interface QueryValidatorUnbondingDelegationsRequestSDKType { + validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorUnbondingDelegationsResponse is response type for the + * Query/ValidatorUnbondingDelegations RPC method. + */ +export interface QueryValidatorUnbondingDelegationsResponse { + unbondingResponses: UnbondingDelegation[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryValidatorUnbondingDelegationsResponse is response type for the + * Query/ValidatorUnbondingDelegations RPC method. + */ +export interface QueryValidatorUnbondingDelegationsResponseSDKType { + unbonding_responses: UnbondingDelegationSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryDelegationRequest is request type for the Query/Delegation RPC method. */ +export interface QueryDelegationRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; +} +/** QueryDelegationRequest is request type for the Query/Delegation RPC method. */ +export interface QueryDelegationRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** QueryDelegationResponse is response type for the Query/Delegation RPC method. */ +export interface QueryDelegationResponse { + /** delegation_responses defines the delegation info of a delegation. */ + delegationResponse?: DelegationResponse; +} +/** QueryDelegationResponse is response type for the Query/Delegation RPC method. */ +export interface QueryDelegationResponseSDKType { + delegation_response?: DelegationResponseSDKType; +} +/** + * QueryUnbondingDelegationRequest is request type for the + * Query/UnbondingDelegation RPC method. + */ +export interface QueryUnbondingDelegationRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; +} +/** + * QueryUnbondingDelegationRequest is request type for the + * Query/UnbondingDelegation RPC method. 
+ */ +export interface QueryUnbondingDelegationRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** + * QueryDelegationResponse is response type for the Query/UnbondingDelegation + * RPC method. + */ +export interface QueryUnbondingDelegationResponse { + /** unbond defines the unbonding information of a delegation. */ + unbond?: UnbondingDelegation; +} +/** + * QueryDelegationResponse is response type for the Query/UnbondingDelegation + * RPC method. + */ +export interface QueryUnbondingDelegationResponseSDKType { + unbond?: UnbondingDelegationSDKType; +} +/** + * QueryDelegatorDelegationsRequest is request type for the + * Query/DelegatorDelegations RPC method. + */ +export interface QueryDelegatorDelegationsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryDelegatorDelegationsRequest is request type for the + * Query/DelegatorDelegations RPC method. + */ +export interface QueryDelegatorDelegationsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryDelegatorDelegationsResponse is response type for the + * Query/DelegatorDelegations RPC method. + */ +export interface QueryDelegatorDelegationsResponse { + /** delegation_responses defines all the delegations' info of a delegator. */ + delegationResponses: DelegationResponse[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryDelegatorDelegationsResponse is response type for the + * Query/DelegatorDelegations RPC method. + */ +export interface QueryDelegatorDelegationsResponseSDKType { + delegation_responses: DelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorUnbondingDelegationsRequest is request type for the + * Query/DelegatorUnbondingDelegations RPC method. + */ +export interface QueryDelegatorUnbondingDelegationsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryDelegatorUnbondingDelegationsRequest is request type for the + * Query/DelegatorUnbondingDelegations RPC method. + */ +export interface QueryDelegatorUnbondingDelegationsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryUnbondingDelegatorDelegationsResponse is response type for the + * Query/UnbondingDelegatorDelegations RPC method. + */ +export interface QueryDelegatorUnbondingDelegationsResponse { + unbondingResponses: UnbondingDelegation[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryUnbondingDelegatorDelegationsResponse is response type for the + * Query/UnbondingDelegatorDelegations RPC method. + */ +export interface QueryDelegatorUnbondingDelegationsResponseSDKType { + unbonding_responses: UnbondingDelegationSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryRedelegationsRequest is request type for the Query/Redelegations RPC + * method. + */ +export interface QueryRedelegationsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** src_validator_addr defines the validator address to redelegate from. */ + srcValidatorAddr: string; + /** dst_validator_addr defines the validator address to redelegate to. 
*/ + dstValidatorAddr: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryRedelegationsRequest is request type for the Query/Redelegations RPC + * method. + */ +export interface QueryRedelegationsRequestSDKType { + delegator_addr: string; + src_validator_addr: string; + dst_validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryRedelegationsResponse is response type for the Query/Redelegations RPC + * method. + */ +export interface QueryRedelegationsResponse { + redelegationResponses: RedelegationResponse[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryRedelegationsResponse is response type for the Query/Redelegations RPC + * method. + */ +export interface QueryRedelegationsResponseSDKType { + redelegation_responses: RedelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorValidatorsRequest is request type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryDelegatorValidatorsRequest is request type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryDelegatorValidatorsResponse is response type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsResponse { + /** validators defines the the validators' info of a delegator. */ + validators: Validator[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryDelegatorValidatorsResponse is response type for the + * Query/DelegatorValidators RPC method. + */ +export interface QueryDelegatorValidatorsResponseSDKType { + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorValidatorRequest is request type for the + * Query/DelegatorValidator RPC method. + */ +export interface QueryDelegatorValidatorRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; +} +/** + * QueryDelegatorValidatorRequest is request type for the + * Query/DelegatorValidator RPC method. + */ +export interface QueryDelegatorValidatorRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** + * QueryDelegatorValidatorResponse response type for the + * Query/DelegatorValidator RPC method. + */ +export interface QueryDelegatorValidatorResponse { + /** validator defines the the validator info. */ + validator?: Validator; +} +/** + * QueryDelegatorValidatorResponse response type for the + * Query/DelegatorValidator RPC method. + */ +export interface QueryDelegatorValidatorResponseSDKType { + validator?: ValidatorSDKType; +} +/** + * QueryHistoricalInfoRequest is request type for the Query/HistoricalInfo RPC + * method. + */ +export interface QueryHistoricalInfoRequest { + /** height defines at which height to query the historical info. */ + height: Long; +} +/** + * QueryHistoricalInfoRequest is request type for the Query/HistoricalInfo RPC + * method. 
+ */ +export interface QueryHistoricalInfoRequestSDKType { + height: Long; +} +/** + * QueryHistoricalInfoResponse is response type for the Query/HistoricalInfo RPC + * method. + */ +export interface QueryHistoricalInfoResponse { + /** hist defines the historical info at the given height. */ + hist?: HistoricalInfo; +} +/** + * QueryHistoricalInfoResponse is response type for the Query/HistoricalInfo RPC + * method. + */ +export interface QueryHistoricalInfoResponseSDKType { + hist?: HistoricalInfoSDKType; +} +/** QueryPoolRequest is request type for the Query/Pool RPC method. */ +export interface QueryPoolRequest { +} +/** QueryPoolRequest is request type for the Query/Pool RPC method. */ +export interface QueryPoolRequestSDKType { +} +/** QueryPoolResponse is response type for the Query/Pool RPC method. */ +export interface QueryPoolResponse { + /** pool defines the pool info. */ + pool?: Pool; +} +/** QueryPoolResponse is response type for the Query/Pool RPC method. */ +export interface QueryPoolResponseSDKType { + pool?: PoolSDKType; +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params holds all the parameters of this module. */ + params?: Params; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +export declare const QueryValidatorsRequest: { + encode(message: QueryValidatorsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorsRequest; + fromPartial(object: DeepPartial): QueryValidatorsRequest; +}; +export declare const QueryValidatorsResponse: { + encode(message: QueryValidatorsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorsResponse; + fromPartial(object: DeepPartial): QueryValidatorsResponse; +}; +export declare const QueryValidatorRequest: { + encode(message: QueryValidatorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorRequest; + fromPartial(object: DeepPartial): QueryValidatorRequest; +}; +export declare const QueryValidatorResponse: { + encode(message: QueryValidatorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorResponse; + fromPartial(object: DeepPartial): QueryValidatorResponse; +}; +export declare const QueryValidatorDelegationsRequest: { + encode(message: QueryValidatorDelegationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorDelegationsRequest; + fromPartial(object: DeepPartial): QueryValidatorDelegationsRequest; +}; +export declare const QueryValidatorDelegationsResponse: { + encode(message: QueryValidatorDelegationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorDelegationsResponse; + fromPartial(object: DeepPartial): QueryValidatorDelegationsResponse; +}; +export declare const QueryValidatorUnbondingDelegationsRequest: { + encode(message: QueryValidatorUnbondingDelegationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | 
Uint8Array, length?: number): QueryValidatorUnbondingDelegationsRequest; + fromPartial(object: DeepPartial): QueryValidatorUnbondingDelegationsRequest; +}; +export declare const QueryValidatorUnbondingDelegationsResponse: { + encode(message: QueryValidatorUnbondingDelegationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorUnbondingDelegationsResponse; + fromPartial(object: DeepPartial): QueryValidatorUnbondingDelegationsResponse; +}; +export declare const QueryDelegationRequest: { + encode(message: QueryDelegationRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRequest; + fromPartial(object: DeepPartial): QueryDelegationRequest; +}; +export declare const QueryDelegationResponse: { + encode(message: QueryDelegationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationResponse; + fromPartial(object: DeepPartial): QueryDelegationResponse; +}; +export declare const QueryUnbondingDelegationRequest: { + encode(message: QueryUnbondingDelegationRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnbondingDelegationRequest; + fromPartial(object: DeepPartial): QueryUnbondingDelegationRequest; +}; +export declare const QueryUnbondingDelegationResponse: { + encode(message: QueryUnbondingDelegationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnbondingDelegationResponse; + fromPartial(object: DeepPartial): QueryUnbondingDelegationResponse; +}; +export declare const QueryDelegatorDelegationsRequest: { + encode(message: QueryDelegatorDelegationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorDelegationsRequest; + fromPartial(object: DeepPartial): QueryDelegatorDelegationsRequest; +}; +export declare const QueryDelegatorDelegationsResponse: { + encode(message: QueryDelegatorDelegationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorDelegationsResponse; + fromPartial(object: DeepPartial): QueryDelegatorDelegationsResponse; +}; +export declare const QueryDelegatorUnbondingDelegationsRequest: { + encode(message: QueryDelegatorUnbondingDelegationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorUnbondingDelegationsRequest; + fromPartial(object: DeepPartial): QueryDelegatorUnbondingDelegationsRequest; +}; +export declare const QueryDelegatorUnbondingDelegationsResponse: { + encode(message: QueryDelegatorUnbondingDelegationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorUnbondingDelegationsResponse; + fromPartial(object: DeepPartial): QueryDelegatorUnbondingDelegationsResponse; +}; +export declare const QueryRedelegationsRequest: { + encode(message: QueryRedelegationsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRedelegationsRequest; + fromPartial(object: DeepPartial): QueryRedelegationsRequest; +}; +export declare const QueryRedelegationsResponse: { + encode(message: QueryRedelegationsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRedelegationsResponse; + fromPartial(object: DeepPartial): 
QueryRedelegationsResponse; +}; +export declare const QueryDelegatorValidatorsRequest: { + encode(message: QueryDelegatorValidatorsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsRequest; + fromPartial(object: DeepPartial): QueryDelegatorValidatorsRequest; +}; +export declare const QueryDelegatorValidatorsResponse: { + encode(message: QueryDelegatorValidatorsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsResponse; + fromPartial(object: DeepPartial): QueryDelegatorValidatorsResponse; +}; +export declare const QueryDelegatorValidatorRequest: { + encode(message: QueryDelegatorValidatorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorRequest; + fromPartial(object: DeepPartial): QueryDelegatorValidatorRequest; +}; +export declare const QueryDelegatorValidatorResponse: { + encode(message: QueryDelegatorValidatorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorResponse; + fromPartial(object: DeepPartial): QueryDelegatorValidatorResponse; +}; +export declare const QueryHistoricalInfoRequest: { + encode(message: QueryHistoricalInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryHistoricalInfoRequest; + fromPartial(object: DeepPartial): QueryHistoricalInfoRequest; +}; +export declare const QueryHistoricalInfoResponse: { + encode(message: QueryHistoricalInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryHistoricalInfoResponse; + fromPartial(object: DeepPartial): QueryHistoricalInfoResponse; +}; +export declare const QueryPoolRequest: { + encode(_: QueryPoolRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPoolRequest; + fromPartial(_: DeepPartial): QueryPoolRequest; +}; +export declare const QueryPoolResponse: { + encode(message: QueryPoolResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPoolResponse; + fromPartial(object: DeepPartial): QueryPoolResponse; +}; +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..f477464b --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/query.lcd.d.ts @@ -0,0 +1,22 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryValidatorsRequest, QueryValidatorsResponseSDKType, QueryValidatorRequest, QueryValidatorResponseSDKType, QueryValidatorDelegationsRequest, QueryValidatorDelegationsResponseSDKType, QueryValidatorUnbondingDelegationsRequest, QueryValidatorUnbondingDelegationsResponseSDKType, QueryDelegationRequest, QueryDelegationResponseSDKType, QueryUnbondingDelegationRequest, 
QueryUnbondingDelegationResponseSDKType, QueryDelegatorDelegationsRequest, QueryDelegatorDelegationsResponseSDKType, QueryDelegatorUnbondingDelegationsRequest, QueryDelegatorUnbondingDelegationsResponseSDKType, QueryRedelegationsRequest, QueryRedelegationsResponseSDKType, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponseSDKType, QueryDelegatorValidatorRequest, QueryDelegatorValidatorResponseSDKType, QueryHistoricalInfoRequest, QueryHistoricalInfoResponseSDKType, QueryPoolRequest, QueryPoolResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + validators(params: QueryValidatorsRequest): Promise<QueryValidatorsResponseSDKType>; + validator(params: QueryValidatorRequest): Promise<QueryValidatorResponseSDKType>; + validatorDelegations(params: QueryValidatorDelegationsRequest): Promise<QueryValidatorDelegationsResponseSDKType>; + validatorUnbondingDelegations(params: QueryValidatorUnbondingDelegationsRequest): Promise<QueryValidatorUnbondingDelegationsResponseSDKType>; + delegation(params: QueryDelegationRequest): Promise<QueryDelegationResponseSDKType>; + unbondingDelegation(params: QueryUnbondingDelegationRequest): Promise<QueryUnbondingDelegationResponseSDKType>; + delegatorDelegations(params: QueryDelegatorDelegationsRequest): Promise<QueryDelegatorDelegationsResponseSDKType>; + delegatorUnbondingDelegations(params: QueryDelegatorUnbondingDelegationsRequest): Promise<QueryDelegatorUnbondingDelegationsResponseSDKType>; + redelegations(params: QueryRedelegationsRequest): Promise<QueryRedelegationsResponseSDKType>; + delegatorValidators(params: QueryDelegatorValidatorsRequest): Promise<QueryDelegatorValidatorsResponseSDKType>; + delegatorValidator(params: QueryDelegatorValidatorRequest): Promise<QueryDelegatorValidatorResponseSDKType>; + historicalInfo(params: QueryHistoricalInfoRequest): Promise<QueryHistoricalInfoResponseSDKType>; + pool(_params?: QueryPoolRequest): Promise<QueryPoolResponseSDKType>; + params(_params?: QueryParamsRequest): Promise<QueryParamsResponseSDKType>; +} diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..6f710e5d --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,80 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryValidatorsRequest, QueryValidatorsResponse, QueryValidatorRequest, QueryValidatorResponse, QueryValidatorDelegationsRequest, QueryValidatorDelegationsResponse, QueryValidatorUnbondingDelegationsRequest, QueryValidatorUnbondingDelegationsResponse, QueryDelegationRequest, QueryDelegationResponse, QueryUnbondingDelegationRequest, QueryUnbondingDelegationResponse, QueryDelegatorDelegationsRequest, QueryDelegatorDelegationsResponse, QueryDelegatorUnbondingDelegationsRequest, QueryDelegatorUnbondingDelegationsResponse, QueryRedelegationsRequest, QueryRedelegationsResponse, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponse, QueryDelegatorValidatorRequest, QueryDelegatorValidatorResponse, QueryHistoricalInfoRequest, QueryHistoricalInfoResponse, QueryPoolRequest, QueryPoolResponse, QueryParamsRequest, QueryParamsResponse } from "./query"; +/** Query defines the gRPC querier service. */ +export interface Query { + /** Validators queries all validators that match the given status. */ + validators(request: QueryValidatorsRequest): Promise<QueryValidatorsResponse>; + /** Validator queries validator info for given validator address. */ + validator(request: QueryValidatorRequest): Promise<QueryValidatorResponse>; + /** ValidatorDelegations queries delegate info for given validator. */ + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise<QueryValidatorDelegationsResponse>; + /** ValidatorUnbondingDelegations queries unbonding delegations of a validator. 
*/ + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise; + /** Delegation queries delegate info for given validator delegator pair. */ + delegation(request: QueryDelegationRequest): Promise; + /** + * UnbondingDelegation queries unbonding info for given validator delegator + * pair. + */ + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise; + /** DelegatorDelegations queries all delegations of a given delegator address. */ + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise; + /** + * DelegatorUnbondingDelegations queries all unbonding delegations of a given + * delegator address. + */ + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise; + /** Redelegations queries redelegations of given address. */ + redelegations(request: QueryRedelegationsRequest): Promise; + /** + * DelegatorValidators queries all validators info for given delegator + * address. + */ + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + /** + * DelegatorValidator queries validator info for given delegator validator + * pair. + */ + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise; + /** HistoricalInfo queries the historical info for given height. */ + historicalInfo(request: QueryHistoricalInfoRequest): Promise; + /** Pool queries the pool info. */ + pool(request?: QueryPoolRequest): Promise; + /** Parameters queries the staking parameters. */ + params(request?: QueryParamsRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + validators(request: QueryValidatorsRequest): Promise; + validator(request: QueryValidatorRequest): Promise; + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: QueryDelegationRequest): Promise; + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: QueryRedelegationsRequest): Promise; + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: QueryHistoricalInfoRequest): Promise; + pool(request?: QueryPoolRequest): Promise; + params(request?: QueryParamsRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + validators(request: QueryValidatorsRequest): Promise; + validator(request: QueryValidatorRequest): Promise; + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: QueryDelegationRequest): Promise; + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: QueryRedelegationsRequest): Promise; + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: QueryHistoricalInfoRequest): Promise; + pool(request?: 
QueryPoolRequest): Promise; + params(request?: QueryParamsRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/staking.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/staking.d.ts new file mode 100644 index 00000000..5ff33ef1 --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/staking.d.ts @@ -0,0 +1,513 @@ +/// +import { Header, HeaderSDKType } from "../../../tendermint/types/types"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** BondStatus is the status of a validator. */ +export declare enum BondStatus { + /** BOND_STATUS_UNSPECIFIED - UNSPECIFIED defines an invalid validator status. */ + BOND_STATUS_UNSPECIFIED = 0, + /** BOND_STATUS_UNBONDED - UNBONDED defines a validator that is not bonded. */ + BOND_STATUS_UNBONDED = 1, + /** BOND_STATUS_UNBONDING - UNBONDING defines a validator that is unbonding. */ + BOND_STATUS_UNBONDING = 2, + /** BOND_STATUS_BONDED - BONDED defines a validator that is bonded. */ + BOND_STATUS_BONDED = 3, + UNRECOGNIZED = -1 +} +export declare const BondStatusSDKType: typeof BondStatus; +export declare function bondStatusFromJSON(object: any): BondStatus; +export declare function bondStatusToJSON(object: BondStatus): string; +/** + * HistoricalInfo contains header and validator information for a given block. + * It is stored as part of staking module's state, which persists the `n` most + * recent HistoricalInfo + * (`n` is set by the staking module's `historical_entries` parameter). + */ +export interface HistoricalInfo { + header?: Header; + valset: Validator[]; +} +/** + * HistoricalInfo contains header and validator information for a given block. + * It is stored as part of staking module's state, which persists the `n` most + * recent HistoricalInfo + * (`n` is set by the staking module's `historical_entries` parameter). + */ +export interface HistoricalInfoSDKType { + header?: HeaderSDKType; + valset: ValidatorSDKType[]; +} +/** + * CommissionRates defines the initial commission rates to be used for creating + * a validator. + */ +export interface CommissionRates { + /** rate is the commission rate charged to delegators, as a fraction. */ + rate: string; + /** max_rate defines the maximum commission rate which validator can ever charge, as a fraction. */ + maxRate: string; + /** max_change_rate defines the maximum daily increase of the validator commission, as a fraction. */ + maxChangeRate: string; +} +/** + * CommissionRates defines the initial commission rates to be used for creating + * a validator. + */ +export interface CommissionRatesSDKType { + rate: string; + max_rate: string; + max_change_rate: string; +} +/** Commission defines commission parameters for a given validator. */ +export interface Commission { + /** commission_rates defines the initial commission rates to be used for creating a validator. */ + commissionRates?: CommissionRates; + /** update_time is the last time the commission rate was changed. */ + updateTime?: Date; +} +/** Commission defines commission parameters for a given validator. */ +export interface CommissionSDKType { + commission_rates?: CommissionRatesSDKType; + update_time?: Date; +} +/** Description defines a validator description. 
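// --- Illustrative usage sketch, not part of the generated output ---
// Shows how createRpcQueryExtension from query.rpc.Query.d.ts above could be attached
// to a @cosmjs/stargate QueryClient. The RPC endpoint, relative import path and the
// request fields (QueryValidatorsRequest lives in ./query, not shown in this diff)
// are assumptions for illustration.
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./cosmos/staking/v1beta1/query.rpc.Query";

async function listBondedValidators(rpcEndpoint: string) {
  const tmClient = await Tendermint34Client.connect(rpcEndpoint);
  const base = new QueryClient(tmClient);
  const staking = createRpcQueryExtension(base);
  // The status filter follows the upstream cosmos.staking.v1beta1 proto convention.
  const { validators } = await staking.validators({
    status: "BOND_STATUS_BONDED",
    pagination: undefined,
  });
  return validators;
}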
*/ +export interface Description { + /** moniker defines a human-readable name for the validator. */ + moniker: string; + /** identity defines an optional identity signature (ex. UPort or Keybase). */ + identity: string; + /** website defines an optional website link. */ + website: string; + /** security_contact defines an optional email for security contact. */ + securityContact: string; + /** details define other optional details. */ + details: string; +} +/** Description defines a validator description. */ +export interface DescriptionSDKType { + moniker: string; + identity: string; + website: string; + security_contact: string; + details: string; +} +/** + * Validator defines a validator, together with the total amount of the + * Validator's bond shares and their exchange rate to coins. Slashing results in + * a decrease in the exchange rate, allowing correct calculation of future + * undelegations without iterating over delegators. When coins are delegated to + * this validator, the validator is credited with a delegation whose number of + * bond shares is based on the amount of coins delegated divided by the current + * exchange rate. Voting power can be calculated as total bonded shares + * multiplied by exchange rate. + */ +export interface Validator { + /** operator_address defines the address of the validator's operator; bech encoded in JSON. */ + operatorAddress: string; + /** consensus_pubkey is the consensus public key of the validator, as a Protobuf Any. */ + consensusPubkey?: Any; + /** jailed defined whether the validator has been jailed from bonded status or not. */ + jailed: boolean; + /** status is the validator status (bonded/unbonding/unbonded). */ + status: BondStatus; + /** tokens define the delegated tokens (incl. self-delegation). */ + tokens: string; + /** delegator_shares defines total shares issued to a validator's delegators. */ + delegatorShares: string; + /** description defines the description terms for the validator. */ + description?: Description; + /** unbonding_height defines, if unbonding, the height at which this validator has begun unbonding. */ + unbondingHeight: Long; + /** unbonding_time defines, if unbonding, the min time for the validator to complete unbonding. */ + unbondingTime?: Date; + /** commission defines the commission parameters. */ + commission?: Commission; + /** min_self_delegation is the validator's self declared minimum self delegation. */ + minSelfDelegation: string; +} +/** + * Validator defines a validator, together with the total amount of the + * Validator's bond shares and their exchange rate to coins. Slashing results in + * a decrease in the exchange rate, allowing correct calculation of future + * undelegations without iterating over delegators. When coins are delegated to + * this validator, the validator is credited with a delegation whose number of + * bond shares is based on the amount of coins delegated divided by the current + * exchange rate. Voting power can be calculated as total bonded shares + * multiplied by exchange rate. + */ +export interface ValidatorSDKType { + operator_address: string; + consensus_pubkey?: AnySDKType; + jailed: boolean; + status: BondStatus; + tokens: string; + delegator_shares: string; + description?: DescriptionSDKType; + unbonding_height: Long; + unbonding_time?: Date; + commission?: CommissionSDKType; + min_self_delegation: string; +} +/** ValAddresses defines a repeated set of validator addresses. 
*/ +export interface ValAddresses { + addresses: string[]; +} +/** ValAddresses defines a repeated set of validator addresses. */ +export interface ValAddressesSDKType { + addresses: string[]; +} +/** + * DVPair is struct that just has a delegator-validator pair with no other data. + * It is intended to be used as a marshalable pointer. For example, a DVPair can + * be used to construct the key to getting an UnbondingDelegation from state. + */ +export interface DVPair { + delegatorAddress: string; + validatorAddress: string; +} +/** + * DVPair is struct that just has a delegator-validator pair with no other data. + * It is intended to be used as a marshalable pointer. For example, a DVPair can + * be used to construct the key to getting an UnbondingDelegation from state. + */ +export interface DVPairSDKType { + delegator_address: string; + validator_address: string; +} +/** DVPairs defines an array of DVPair objects. */ +export interface DVPairs { + pairs: DVPair[]; +} +/** DVPairs defines an array of DVPair objects. */ +export interface DVPairsSDKType { + pairs: DVPairSDKType[]; +} +/** + * DVVTriplet is struct that just has a delegator-validator-validator triplet + * with no other data. It is intended to be used as a marshalable pointer. For + * example, a DVVTriplet can be used to construct the key to getting a + * Redelegation from state. + */ +export interface DVVTriplet { + delegatorAddress: string; + validatorSrcAddress: string; + validatorDstAddress: string; +} +/** + * DVVTriplet is struct that just has a delegator-validator-validator triplet + * with no other data. It is intended to be used as a marshalable pointer. For + * example, a DVVTriplet can be used to construct the key to getting a + * Redelegation from state. + */ +export interface DVVTripletSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; +} +/** DVVTriplets defines an array of DVVTriplet objects. */ +export interface DVVTriplets { + triplets: DVVTriplet[]; +} +/** DVVTriplets defines an array of DVVTriplet objects. */ +export interface DVVTripletsSDKType { + triplets: DVVTripletSDKType[]; +} +/** + * Delegation represents the bond with tokens held by an account. It is + * owned by one delegator, and is associated with the voting power of one + * validator. + */ +export interface Delegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_address is the bech32-encoded address of the validator. */ + validatorAddress: string; + /** shares define the delegation shares received. */ + shares: string; +} +/** + * Delegation represents the bond with tokens held by an account. It is + * owned by one delegator, and is associated with the voting power of one + * validator. + */ +export interface DelegationSDKType { + delegator_address: string; + validator_address: string; + shares: string; +} +/** + * UnbondingDelegation stores all of a single delegator's unbonding bonds + * for a single validator in an time-ordered list. + */ +export interface UnbondingDelegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_address is the bech32-encoded address of the validator. */ + validatorAddress: string; + /** entries are the unbonding delegation entries. */ + entries: UnbondingDelegationEntry[]; +} +/** + * UnbondingDelegation stores all of a single delegator's unbonding bonds + * for a single validator in an time-ordered list. 
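// --- Illustrative sketch, not part of the generated output ---
// As the declarations above show, each `Foo` / `FooSDKType` pair carries the same
// fields, camelCased for TypeScript use versus proto/SDK snake_case. A minimal
// contrast using Delegation (import path and addresses are placeholders):
import type { Delegation, DelegationSDKType } from "./cosmos/staking/v1beta1/staking";

const delegation: Delegation = {
  delegatorAddress: "cosmos1...",        // placeholder bech32 address
  validatorAddress: "cosmosvaloper1...", // placeholder bech32 address
  shares: "1000000",
};

const delegationSdk: DelegationSDKType = {
  delegator_address: "cosmos1...",
  validator_address: "cosmosvaloper1...",
  shares: "1000000",
};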
+ */ +export interface UnbondingDelegationSDKType { + delegator_address: string; + validator_address: string; + entries: UnbondingDelegationEntrySDKType[]; +} +/** UnbondingDelegationEntry defines an unbonding object with relevant metadata. */ +export interface UnbondingDelegationEntry { + /** creation_height is the height which the unbonding took place. */ + creationHeight: Long; + /** completion_time is the unix time for unbonding completion. */ + completionTime?: Date; + /** initial_balance defines the tokens initially scheduled to receive at completion. */ + initialBalance: string; + /** balance defines the tokens to receive at completion. */ + balance: string; +} +/** UnbondingDelegationEntry defines an unbonding object with relevant metadata. */ +export interface UnbondingDelegationEntrySDKType { + creation_height: Long; + completion_time?: Date; + initial_balance: string; + balance: string; +} +/** RedelegationEntry defines a redelegation object with relevant metadata. */ +export interface RedelegationEntry { + /** creation_height defines the height which the redelegation took place. */ + creationHeight: Long; + /** completion_time defines the unix time for redelegation completion. */ + completionTime?: Date; + /** initial_balance defines the initial balance when redelegation started. */ + initialBalance: string; + /** shares_dst is the amount of destination-validator shares created by redelegation. */ + sharesDst: string; +} +/** RedelegationEntry defines a redelegation object with relevant metadata. */ +export interface RedelegationEntrySDKType { + creation_height: Long; + completion_time?: Date; + initial_balance: string; + shares_dst: string; +} +/** + * Redelegation contains the list of a particular delegator's redelegating bonds + * from a particular source validator to a particular destination validator. + */ +export interface Redelegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_src_address is the validator redelegation source operator address. */ + validatorSrcAddress: string; + /** validator_dst_address is the validator redelegation destination operator address. */ + validatorDstAddress: string; + /** entries are the redelegation entries. */ + entries: RedelegationEntry[]; +} +/** + * Redelegation contains the list of a particular delegator's redelegating bonds + * from a particular source validator to a particular destination validator. + */ +export interface RedelegationSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; + entries: RedelegationEntrySDKType[]; +} +/** Params defines the parameters for the staking module. */ +export interface Params { + /** unbonding_time is the time duration of unbonding. */ + unbondingTime?: Duration; + /** max_validators is the maximum number of validators. */ + maxValidators: number; + /** max_entries is the max entries for either unbonding delegation or redelegation (per pair/trio). */ + maxEntries: number; + /** historical_entries is the number of historical entries to persist. */ + historicalEntries: number; + /** bond_denom defines the bondable coin denomination. */ + bondDenom: string; + /** min_commission_rate is the chain-wide minimum commission rate that a validator can charge their delegators */ + minCommissionRate: string; +} +/** Params defines the parameters for the staking module. 
*/ +export interface ParamsSDKType { + unbonding_time?: DurationSDKType; + max_validators: number; + max_entries: number; + historical_entries: number; + bond_denom: string; + min_commission_rate: string; +} +/** + * DelegationResponse is equivalent to Delegation except that it contains a + * balance in addition to shares which is more suitable for client responses. + */ +export interface DelegationResponse { + delegation?: Delegation; + balance?: Coin; +} +/** + * DelegationResponse is equivalent to Delegation except that it contains a + * balance in addition to shares which is more suitable for client responses. + */ +export interface DelegationResponseSDKType { + delegation?: DelegationSDKType; + balance?: CoinSDKType; +} +/** + * RedelegationEntryResponse is equivalent to a RedelegationEntry except that it + * contains a balance in addition to shares which is more suitable for client + * responses. + */ +export interface RedelegationEntryResponse { + redelegationEntry?: RedelegationEntry; + balance: string; +} +/** + * RedelegationEntryResponse is equivalent to a RedelegationEntry except that it + * contains a balance in addition to shares which is more suitable for client + * responses. + */ +export interface RedelegationEntryResponseSDKType { + redelegation_entry?: RedelegationEntrySDKType; + balance: string; +} +/** + * RedelegationResponse is equivalent to a Redelegation except that its entries + * contain a balance in addition to shares which is more suitable for client + * responses. + */ +export interface RedelegationResponse { + redelegation?: Redelegation; + entries: RedelegationEntryResponse[]; +} +/** + * RedelegationResponse is equivalent to a Redelegation except that its entries + * contain a balance in addition to shares which is more suitable for client + * responses. + */ +export interface RedelegationResponseSDKType { + redelegation?: RedelegationSDKType; + entries: RedelegationEntryResponseSDKType[]; +} +/** + * Pool is used for tracking bonded and not-bonded token supply of the bond + * denomination. + */ +export interface Pool { + notBondedTokens: string; + bondedTokens: string; +} +/** + * Pool is used for tracking bonded and not-bonded token supply of the bond + * denomination. 
+ */ +export interface PoolSDKType { + not_bonded_tokens: string; + bonded_tokens: string; +} +export declare const HistoricalInfo: { + encode(message: HistoricalInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): HistoricalInfo; + fromPartial(object: DeepPartial): HistoricalInfo; +}; +export declare const CommissionRates: { + encode(message: CommissionRates, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommissionRates; + fromPartial(object: DeepPartial): CommissionRates; +}; +export declare const Commission: { + encode(message: Commission, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Commission; + fromPartial(object: DeepPartial): Commission; +}; +export declare const Description: { + encode(message: Description, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Description; + fromPartial(object: DeepPartial): Description; +}; +export declare const Validator: { + encode(message: Validator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Validator; + fromPartial(object: DeepPartial): Validator; +}; +export declare const ValAddresses: { + encode(message: ValAddresses, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValAddresses; + fromPartial(object: DeepPartial): ValAddresses; +}; +export declare const DVPair: { + encode(message: DVPair, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DVPair; + fromPartial(object: DeepPartial): DVPair; +}; +export declare const DVPairs: { + encode(message: DVPairs, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DVPairs; + fromPartial(object: DeepPartial): DVPairs; +}; +export declare const DVVTriplet: { + encode(message: DVVTriplet, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DVVTriplet; + fromPartial(object: DeepPartial): DVVTriplet; +}; +export declare const DVVTriplets: { + encode(message: DVVTriplets, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DVVTriplets; + fromPartial(object: DeepPartial): DVVTriplets; +}; +export declare const Delegation: { + encode(message: Delegation, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Delegation; + fromPartial(object: DeepPartial): Delegation; +}; +export declare const UnbondingDelegation: { + encode(message: UnbondingDelegation, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UnbondingDelegation; + fromPartial(object: DeepPartial): UnbondingDelegation; +}; +export declare const UnbondingDelegationEntry: { + encode(message: UnbondingDelegationEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UnbondingDelegationEntry; + fromPartial(object: DeepPartial): UnbondingDelegationEntry; +}; +export declare const RedelegationEntry: { + encode(message: RedelegationEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationEntry; + fromPartial(object: DeepPartial): RedelegationEntry; +}; +export declare const Redelegation: { + encode(message: Redelegation, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Redelegation; + fromPartial(object: 
DeepPartial): Redelegation; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; +export declare const DelegationResponse: { + encode(message: DelegationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelegationResponse; + fromPartial(object: DeepPartial): DelegationResponse; +}; +export declare const RedelegationEntryResponse: { + encode(message: RedelegationEntryResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationEntryResponse; + fromPartial(object: DeepPartial): RedelegationEntryResponse; +}; +export declare const RedelegationResponse: { + encode(message: RedelegationResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationResponse; + fromPartial(object: DeepPartial): RedelegationResponse; +}; +export declare const Pool: { + encode(message: Pool, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Pool; + fromPartial(object: DeepPartial): Pool; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/tx.d.ts new file mode 100644 index 00000000..4c3d5d75 --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/tx.d.ts @@ -0,0 +1,185 @@ +import { Description, DescriptionSDKType, CommissionRates, CommissionRatesSDKType } from "./staking"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgCreateValidator defines a SDK message for creating a new validator. */ +export interface MsgCreateValidator { + description?: Description; + commission?: CommissionRates; + minSelfDelegation: string; + delegatorAddress: string; + validatorAddress: string; + pubkey?: Any; + value?: Coin; +} +/** MsgCreateValidator defines a SDK message for creating a new validator. */ +export interface MsgCreateValidatorSDKType { + description?: DescriptionSDKType; + commission?: CommissionRatesSDKType; + min_self_delegation: string; + delegator_address: string; + validator_address: string; + pubkey?: AnySDKType; + value?: CoinSDKType; +} +/** MsgCreateValidatorResponse defines the Msg/CreateValidator response type. */ +export interface MsgCreateValidatorResponse { +} +/** MsgCreateValidatorResponse defines the Msg/CreateValidator response type. */ +export interface MsgCreateValidatorResponseSDKType { +} +/** MsgEditValidator defines a SDK message for editing an existing validator. */ +export interface MsgEditValidator { + description?: Description; + validatorAddress: string; + /** + * We pass a reference to the new commission rate and min self delegation as + * it's not mandatory to update. If not updated, the deserialized rate will be + * zero with no way to distinguish if an update was intended. + * REF: #2373 + */ + commissionRate: string; + minSelfDelegation: string; +} +/** MsgEditValidator defines a SDK message for editing an existing validator. */ +export interface MsgEditValidatorSDKType { + description?: DescriptionSDKType; + validator_address: string; + commission_rate: string; + min_self_delegation: string; +} +/** MsgEditValidatorResponse defines the Msg/EditValidator response type. 
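// --- Illustrative sketch, not part of the generated output ---
// The generated codec objects above expose encode/decode/fromPartial. Round-tripping
// the staking Pool type, for example (import path and values are illustrative):
import { Pool } from "./cosmos/staking/v1beta1/staking";

const pool = Pool.fromPartial({ notBondedTokens: "1000", bondedTokens: "250000" });
const bytes = Pool.encode(pool).finish(); // Writer -> Uint8Array
const decoded = Pool.decode(bytes);       // accepts Uint8Array or a protobufjs Reader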
*/ +export interface MsgEditValidatorResponse { +} +/** MsgEditValidatorResponse defines the Msg/EditValidator response type. */ +export interface MsgEditValidatorResponseSDKType { +} +/** + * MsgDelegate defines a SDK message for performing a delegation of coins + * from a delegator to a validator. + */ +export interface MsgDelegate { + delegatorAddress: string; + validatorAddress: string; + amount?: Coin; +} +/** + * MsgDelegate defines a SDK message for performing a delegation of coins + * from a delegator to a validator. + */ +export interface MsgDelegateSDKType { + delegator_address: string; + validator_address: string; + amount?: CoinSDKType; +} +/** MsgDelegateResponse defines the Msg/Delegate response type. */ +export interface MsgDelegateResponse { +} +/** MsgDelegateResponse defines the Msg/Delegate response type. */ +export interface MsgDelegateResponseSDKType { +} +/** + * MsgBeginRedelegate defines a SDK message for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ +export interface MsgBeginRedelegate { + delegatorAddress: string; + validatorSrcAddress: string; + validatorDstAddress: string; + amount?: Coin; +} +/** + * MsgBeginRedelegate defines a SDK message for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ +export interface MsgBeginRedelegateSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; + amount?: CoinSDKType; +} +/** MsgBeginRedelegateResponse defines the Msg/BeginRedelegate response type. */ +export interface MsgBeginRedelegateResponse { + completionTime?: Date; +} +/** MsgBeginRedelegateResponse defines the Msg/BeginRedelegate response type. */ +export interface MsgBeginRedelegateResponseSDKType { + completion_time?: Date; +} +/** + * MsgUndelegate defines a SDK message for performing an undelegation from a + * delegate and a validator. + */ +export interface MsgUndelegate { + delegatorAddress: string; + validatorAddress: string; + amount?: Coin; +} +/** + * MsgUndelegate defines a SDK message for performing an undelegation from a + * delegate and a validator. + */ +export interface MsgUndelegateSDKType { + delegator_address: string; + validator_address: string; + amount?: CoinSDKType; +} +/** MsgUndelegateResponse defines the Msg/Undelegate response type. */ +export interface MsgUndelegateResponse { + completionTime?: Date; +} +/** MsgUndelegateResponse defines the Msg/Undelegate response type. 
*/ +export interface MsgUndelegateResponseSDKType { + completion_time?: Date; +} +export declare const MsgCreateValidator: { + encode(message: MsgCreateValidator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidator; + fromPartial(object: DeepPartial): MsgCreateValidator; +}; +export declare const MsgCreateValidatorResponse: { + encode(_: MsgCreateValidatorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidatorResponse; + fromPartial(_: DeepPartial): MsgCreateValidatorResponse; +}; +export declare const MsgEditValidator: { + encode(message: MsgEditValidator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEditValidator; + fromPartial(object: DeepPartial): MsgEditValidator; +}; +export declare const MsgEditValidatorResponse: { + encode(_: MsgEditValidatorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEditValidatorResponse; + fromPartial(_: DeepPartial): MsgEditValidatorResponse; +}; +export declare const MsgDelegate: { + encode(message: MsgDelegate, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDelegate; + fromPartial(object: DeepPartial): MsgDelegate; +}; +export declare const MsgDelegateResponse: { + encode(_: MsgDelegateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDelegateResponse; + fromPartial(_: DeepPartial): MsgDelegateResponse; +}; +export declare const MsgBeginRedelegate: { + encode(message: MsgBeginRedelegate, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgBeginRedelegate; + fromPartial(object: DeepPartial): MsgBeginRedelegate; +}; +export declare const MsgBeginRedelegateResponse: { + encode(message: MsgBeginRedelegateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgBeginRedelegateResponse; + fromPartial(object: DeepPartial): MsgBeginRedelegateResponse; +}; +export declare const MsgUndelegate: { + encode(message: MsgUndelegate, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUndelegate; + fromPartial(object: DeepPartial): MsgUndelegate; +}; +export declare const MsgUndelegateResponse: { + encode(message: MsgUndelegateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUndelegateResponse; + fromPartial(object: DeepPartial): MsgUndelegateResponse; +}; diff --git a/packages/codegen/dist/cosmos/staking/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/staking/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..6de176bb --- /dev/null +++ b/packages/codegen/dist/cosmos/staking/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,33 @@ +import { Rpc } from "../../../helpers"; +import { MsgCreateValidator, MsgCreateValidatorResponse, MsgEditValidator, MsgEditValidatorResponse, MsgDelegate, MsgDelegateResponse, MsgBeginRedelegate, MsgBeginRedelegateResponse, MsgUndelegate, MsgUndelegateResponse } from "./tx"; +/** Msg defines the staking Msg service. */ +export interface Msg { + /** CreateValidator defines a method for creating a new validator. */ + createValidator(request: MsgCreateValidator): Promise; + /** EditValidator defines a method for editing an existing validator. 
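// --- Illustrative sketch, not part of the generated output ---
// Packing a MsgDelegate (codec declared above) for inclusion in a TxBody. The type URL
// follows the usual "/cosmos.staking.v1beta1.MsgDelegate" convention; import path,
// addresses and amounts are placeholders.
import { MsgDelegate } from "./cosmos/staking/v1beta1/tx";

const msg = MsgDelegate.fromPartial({
  delegatorAddress: "cosmos1...",        // placeholder
  validatorAddress: "cosmosvaloper1...", // placeholder
  amount: { denom: "uatom", amount: "1000000" },
});

const anyMsg = {
  typeUrl: "/cosmos.staking.v1beta1.MsgDelegate",
  value: MsgDelegate.encode(msg).finish(),
};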
*/ + editValidator(request: MsgEditValidator): Promise; + /** + * Delegate defines a method for performing a delegation of coins + * from a delegator to a validator. + */ + delegate(request: MsgDelegate): Promise; + /** + * BeginRedelegate defines a method for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ + beginRedelegate(request: MsgBeginRedelegate): Promise; + /** + * Undelegate defines a method for performing an undelegation from a + * delegate and a validator. + */ + undelegate(request: MsgUndelegate): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + createValidator(request: MsgCreateValidator): Promise; + editValidator(request: MsgEditValidator): Promise; + delegate(request: MsgDelegate): Promise; + beginRedelegate(request: MsgBeginRedelegate): Promise; + undelegate(request: MsgUndelegate): Promise; +} diff --git a/packages/codegen/dist/cosmos/tx/signing/v1beta1/signing.d.ts b/packages/codegen/dist/cosmos/tx/signing/v1beta1/signing.d.ts new file mode 100644 index 00000000..34aac02d --- /dev/null +++ b/packages/codegen/dist/cosmos/tx/signing/v1beta1/signing.d.ts @@ -0,0 +1,149 @@ +/// +import { CompactBitArray, CompactBitArraySDKType } from "../../../crypto/multisig/v1beta1/multisig"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * SignMode represents a signing mode with its own security guarantees. + * + * This enum should be considered a registry of all known sign modes + * in the Cosmos ecosystem. Apps are not expected to support all known + * sign modes. Apps that would like to support custom sign modes are + * encouraged to open a small PR against this file to add a new case + * to this SignMode enum describing their sign mode so that different + * apps have a consistent version of this enum. + */ +export declare enum SignMode { + /** + * SIGN_MODE_UNSPECIFIED - SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be + * rejected. + */ + SIGN_MODE_UNSPECIFIED = 0, + /** + * SIGN_MODE_DIRECT - SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is + * verified with raw bytes from Tx. + */ + SIGN_MODE_DIRECT = 1, + /** + * SIGN_MODE_TEXTUAL - SIGN_MODE_TEXTUAL is a future signing mode that will verify some + * human-readable textual representation on top of the binary representation + * from SIGN_MODE_DIRECT. It is currently not supported. + */ + SIGN_MODE_TEXTUAL = 2, + /** + * SIGN_MODE_DIRECT_AUX - SIGN_MODE_DIRECT_AUX specifies a signing mode which uses + * SignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not + * require signers signing over other signers' `signer_info`. It also allows + * for adding Tips in transactions. + * + * Since: cosmos-sdk 0.46 + */ + SIGN_MODE_DIRECT_AUX = 3, + /** + * SIGN_MODE_LEGACY_AMINO_JSON - SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses + * Amino JSON and will be removed in the future. + */ + SIGN_MODE_LEGACY_AMINO_JSON = 127, + UNRECOGNIZED = -1 +} +export declare const SignModeSDKType: typeof SignMode; +export declare function signModeFromJSON(object: any): SignMode; +export declare function signModeToJSON(object: SignMode): string; +/** SignatureDescriptors wraps multiple SignatureDescriptor's. 
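// --- Illustrative sketch, not part of the generated output ---
// The SignMode helpers declared above convert between the numeric enum and its proto
// string names (import path assumed):
import { SignMode, signModeToJSON, signModeFromJSON } from "./cosmos/tx/signing/v1beta1/signing";

const name = signModeToJSON(SignMode.SIGN_MODE_DIRECT);     // "SIGN_MODE_DIRECT"
const mode = signModeFromJSON("SIGN_MODE_LEGACY_AMINO_JSON"); // SignMode.SIGN_MODE_LEGACY_AMINO_JSON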
*/ +export interface SignatureDescriptors { + /** signatures are the signature descriptors */ + signatures: SignatureDescriptor[]; +} +/** SignatureDescriptors wraps multiple SignatureDescriptor's. */ +export interface SignatureDescriptorsSDKType { + signatures: SignatureDescriptorSDKType[]; +} +/** + * SignatureDescriptor is a convenience type which represents the full data for + * a signature including the public key of the signer, signing modes and the + * signature itself. It is primarily used for coordinating signatures between + * clients. + */ +export interface SignatureDescriptor { + /** public_key is the public key of the signer */ + publicKey?: Any; + data?: SignatureDescriptor_Data; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to prevent + * replay attacks. + */ + sequence: Long; +} +/** + * SignatureDescriptor is a convenience type which represents the full data for + * a signature including the public key of the signer, signing modes and the + * signature itself. It is primarily used for coordinating signatures between + * clients. + */ +export interface SignatureDescriptorSDKType { + public_key?: AnySDKType; + data?: SignatureDescriptor_DataSDKType; + sequence: Long; +} +/** Data represents signature data */ +export interface SignatureDescriptor_Data { + /** single represents a single signer */ + single?: SignatureDescriptor_Data_Single; + /** multi represents a multisig signer */ + multi?: SignatureDescriptor_Data_Multi; +} +/** Data represents signature data */ +export interface SignatureDescriptor_DataSDKType { + single?: SignatureDescriptor_Data_SingleSDKType; + multi?: SignatureDescriptor_Data_MultiSDKType; +} +/** Single is the signature data for a single signer */ +export interface SignatureDescriptor_Data_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; + /** signature is the raw signature bytes */ + signature: Uint8Array; +} +/** Single is the signature data for a single signer */ +export interface SignatureDescriptor_Data_SingleSDKType { + mode: SignMode; + signature: Uint8Array; +} +/** Multi is the signature data for a multisig public key */ +export interface SignatureDescriptor_Data_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray?: CompactBitArray; + /** signatures is the signatures of the multi-signature */ + signatures: SignatureDescriptor_Data[]; +} +/** Multi is the signature data for a multisig public key */ +export interface SignatureDescriptor_Data_MultiSDKType { + bitarray?: CompactBitArraySDKType; + signatures: SignatureDescriptor_DataSDKType[]; +} +export declare const SignatureDescriptors: { + encode(message: SignatureDescriptors, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptors; + fromPartial(object: DeepPartial): SignatureDescriptors; +}; +export declare const SignatureDescriptor: { + encode(message: SignatureDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor; + fromPartial(object: DeepPartial): SignatureDescriptor; +}; +export declare const SignatureDescriptor_Data: { + encode(message: SignatureDescriptor_Data, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data; + fromPartial(object: DeepPartial): SignatureDescriptor_Data; +}; +export declare const 
SignatureDescriptor_Data_Single: { + encode(message: SignatureDescriptor_Data_Single, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Single; + fromPartial(object: DeepPartial): SignatureDescriptor_Data_Single; +}; +export declare const SignatureDescriptor_Data_Multi: { + encode(message: SignatureDescriptor_Data_Multi, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Multi; + fromPartial(object: DeepPartial): SignatureDescriptor_Data_Multi; +}; diff --git a/packages/codegen/dist/cosmos/tx/v1beta1/service.d.ts b/packages/codegen/dist/cosmos/tx/v1beta1/service.d.ts new file mode 100644 index 00000000..756e56b8 --- /dev/null +++ b/packages/codegen/dist/cosmos/tx/v1beta1/service.d.ts @@ -0,0 +1,286 @@ +/// +import { Tx, TxSDKType } from "./tx"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { TxResponse, TxResponseSDKType, GasInfo, GasInfoSDKType, Result, ResultSDKType } from "../../base/abci/v1beta1/abci"; +import { BlockID, BlockIDSDKType } from "../../../tendermint/types/types"; +import { Block, BlockSDKType } from "../../../tendermint/types/block"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** OrderBy defines the sorting order */ +export declare enum OrderBy { + /** ORDER_BY_UNSPECIFIED - ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case. */ + ORDER_BY_UNSPECIFIED = 0, + /** ORDER_BY_ASC - ORDER_BY_ASC defines ascending order */ + ORDER_BY_ASC = 1, + /** ORDER_BY_DESC - ORDER_BY_DESC defines descending order */ + ORDER_BY_DESC = 2, + UNRECOGNIZED = -1 +} +export declare const OrderBySDKType: typeof OrderBy; +export declare function orderByFromJSON(object: any): OrderBy; +export declare function orderByToJSON(object: OrderBy): string; +/** BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. */ +export declare enum BroadcastMode { + /** BROADCAST_MODE_UNSPECIFIED - zero-value for mode ordering */ + BROADCAST_MODE_UNSPECIFIED = 0, + /** + * BROADCAST_MODE_BLOCK - BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + * the tx to be committed in a block. + */ + BROADCAST_MODE_BLOCK = 1, + /** + * BROADCAST_MODE_SYNC - BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + * a CheckTx execution response only. + */ + BROADCAST_MODE_SYNC = 2, + /** + * BROADCAST_MODE_ASYNC - BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + * immediately. + */ + BROADCAST_MODE_ASYNC = 3, + UNRECOGNIZED = -1 +} +export declare const BroadcastModeSDKType: typeof BroadcastMode; +export declare function broadcastModeFromJSON(object: any): BroadcastMode; +export declare function broadcastModeToJSON(object: BroadcastMode): string; +/** + * GetTxsEventRequest is the request type for the Service.TxsByEvents + * RPC method. + */ +export interface GetTxsEventRequest { + /** events is the list of transaction event type. */ + events: string[]; + /** pagination defines a pagination for the request. */ + pagination?: PageRequest; + orderBy: OrderBy; +} +/** + * GetTxsEventRequest is the request type for the Service.TxsByEvents + * RPC method. 
+ */ +export interface GetTxsEventRequestSDKType { + events: string[]; + pagination?: PageRequestSDKType; + order_by: OrderBy; +} +/** + * GetTxsEventResponse is the response type for the Service.TxsByEvents + * RPC method. + */ +export interface GetTxsEventResponse { + /** txs is the list of queried transactions. */ + txs: Tx[]; + /** tx_responses is the list of queried TxResponses. */ + txResponses: TxResponse[]; + /** pagination defines a pagination for the response. */ + pagination?: PageResponse; +} +/** + * GetTxsEventResponse is the response type for the Service.TxsByEvents + * RPC method. + */ +export interface GetTxsEventResponseSDKType { + txs: TxSDKType[]; + tx_responses: TxResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * BroadcastTxRequest is the request type for the Service.BroadcastTxRequest + * RPC method. + */ +export interface BroadcastTxRequest { + /** tx_bytes is the raw transaction. */ + txBytes: Uint8Array; + mode: BroadcastMode; +} +/** + * BroadcastTxRequest is the request type for the Service.BroadcastTxRequest + * RPC method. + */ +export interface BroadcastTxRequestSDKType { + tx_bytes: Uint8Array; + mode: BroadcastMode; +} +/** + * BroadcastTxResponse is the response type for the + * Service.BroadcastTx method. + */ +export interface BroadcastTxResponse { + /** tx_response is the queried TxResponses. */ + txResponse?: TxResponse; +} +/** + * BroadcastTxResponse is the response type for the + * Service.BroadcastTx method. + */ +export interface BroadcastTxResponseSDKType { + tx_response?: TxResponseSDKType; +} +/** + * SimulateRequest is the request type for the Service.Simulate + * RPC method. + */ +export interface SimulateRequest { + /** + * tx is the transaction to simulate. + * Deprecated. Send raw tx bytes instead. + */ + /** @deprecated */ + tx?: Tx; + /** + * tx_bytes is the raw transaction. + * + * Since: cosmos-sdk 0.43 + */ + txBytes: Uint8Array; +} +/** + * SimulateRequest is the request type for the Service.Simulate + * RPC method. + */ +export interface SimulateRequestSDKType { + /** @deprecated */ + tx?: TxSDKType; + tx_bytes: Uint8Array; +} +/** + * SimulateResponse is the response type for the + * Service.SimulateRPC method. + */ +export interface SimulateResponse { + /** gas_info is the information about gas used in the simulation. */ + gasInfo?: GasInfo; + /** result is the result of the simulation. */ + result?: Result; +} +/** + * SimulateResponse is the response type for the + * Service.SimulateRPC method. + */ +export interface SimulateResponseSDKType { + gas_info?: GasInfoSDKType; + result?: ResultSDKType; +} +/** + * GetTxRequest is the request type for the Service.GetTx + * RPC method. + */ +export interface GetTxRequest { + /** hash is the tx hash to query, encoded as a hex string. */ + hash: string; +} +/** + * GetTxRequest is the request type for the Service.GetTx + * RPC method. + */ +export interface GetTxRequestSDKType { + hash: string; +} +/** GetTxResponse is the response type for the Service.GetTx method. */ +export interface GetTxResponse { + /** tx is the queried transaction. */ + tx?: Tx; + /** tx_response is the queried TxResponses. */ + txResponse?: TxResponse; +} +/** GetTxResponse is the response type for the Service.GetTx method. */ +export interface GetTxResponseSDKType { + tx?: TxSDKType; + tx_response?: TxResponseSDKType; +} +/** + * GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs + * RPC method. 
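// --- Illustrative sketch, not part of the generated output ---
// Shaping a BroadcastTxRequest from already-signed TxRaw bytes. Per the enum comments
// above, BROADCAST_MODE_SYNC waits only for the CheckTx response (import path assumed):
import { BroadcastMode, BroadcastTxRequest } from "./cosmos/tx/v1beta1/service";

function makeBroadcastRequest(txBytes: Uint8Array): BroadcastTxRequest {
  return { txBytes, mode: BroadcastMode.BROADCAST_MODE_SYNC };
}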
+ * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsRequest { + /** height is the height of the block to query. */ + height: Long; + /** pagination defines a pagination for the request. */ + pagination?: PageRequest; +} +/** + * GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs + * RPC method. + * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsRequestSDKType { + height: Long; + pagination?: PageRequestSDKType; +} +/** + * GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsResponse { + /** txs are the transactions in the block. */ + txs: Tx[]; + blockId?: BlockID; + block?: Block; + /** pagination defines a pagination for the response. */ + pagination?: PageResponse; +} +/** + * GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsResponseSDKType { + txs: TxSDKType[]; + block_id?: BlockIDSDKType; + block?: BlockSDKType; + pagination?: PageResponseSDKType; +} +export declare const GetTxsEventRequest: { + encode(message: GetTxsEventRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventRequest; + fromPartial(object: DeepPartial): GetTxsEventRequest; +}; +export declare const GetTxsEventResponse: { + encode(message: GetTxsEventResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventResponse; + fromPartial(object: DeepPartial): GetTxsEventResponse; +}; +export declare const BroadcastTxRequest: { + encode(message: BroadcastTxRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxRequest; + fromPartial(object: DeepPartial): BroadcastTxRequest; +}; +export declare const BroadcastTxResponse: { + encode(message: BroadcastTxResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxResponse; + fromPartial(object: DeepPartial): BroadcastTxResponse; +}; +export declare const SimulateRequest: { + encode(message: SimulateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateRequest; + fromPartial(object: DeepPartial): SimulateRequest; +}; +export declare const SimulateResponse: { + encode(message: SimulateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateResponse; + fromPartial(object: DeepPartial): SimulateResponse; +}; +export declare const GetTxRequest: { + encode(message: GetTxRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxRequest; + fromPartial(object: DeepPartial): GetTxRequest; +}; +export declare const GetTxResponse: { + encode(message: GetTxResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxResponse; + fromPartial(object: DeepPartial): GetTxResponse; +}; +export declare const GetBlockWithTxsRequest: { + encode(message: GetBlockWithTxsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockWithTxsRequest; + fromPartial(object: DeepPartial): GetBlockWithTxsRequest; +}; +export declare const GetBlockWithTxsResponse: { + encode(message: GetBlockWithTxsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | 
Uint8Array, length?: number): GetBlockWithTxsResponse; + fromPartial(object: DeepPartial): GetBlockWithTxsResponse; +}; diff --git a/packages/codegen/dist/cosmos/tx/v1beta1/service.lcd.d.ts b/packages/codegen/dist/cosmos/tx/v1beta1/service.lcd.d.ts new file mode 100644 index 00000000..5a4e8c97 --- /dev/null +++ b/packages/codegen/dist/cosmos/tx/v1beta1/service.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { GetTxRequest, GetTxResponseSDKType, GetTxsEventRequest, GetTxsEventResponseSDKType, GetBlockWithTxsRequest, GetBlockWithTxsResponseSDKType } from "./service"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + getTx(params: GetTxRequest): Promise; + getTxsEvent(params: GetTxsEventRequest): Promise; + getBlockWithTxs(params: GetBlockWithTxsRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/tx/v1beta1/service.rpc.Service.d.ts b/packages/codegen/dist/cosmos/tx/v1beta1/service.rpc.Service.d.ts new file mode 100644 index 00000000..8ce5bc4f --- /dev/null +++ b/packages/codegen/dist/cosmos/tx/v1beta1/service.rpc.Service.d.ts @@ -0,0 +1,36 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { SimulateRequest, SimulateResponse, GetTxRequest, GetTxResponse, BroadcastTxRequest, BroadcastTxResponse, GetTxsEventRequest, GetTxsEventResponse, GetBlockWithTxsRequest, GetBlockWithTxsResponse } from "./service"; +/** Service defines a gRPC service for interacting with transactions. */ +export interface Service { + /** Simulate simulates executing a transaction for estimating gas usage. */ + simulate(request: SimulateRequest): Promise; + /** GetTx fetches a tx by hash. */ + getTx(request: GetTxRequest): Promise; + /** BroadcastTx broadcast transaction. */ + broadcastTx(request: BroadcastTxRequest): Promise; + /** GetTxsEvent fetches txs by event. */ + getTxsEvent(request: GetTxsEventRequest): Promise; + /** + * GetBlockWithTxs fetches a block with decoded txs. + * + * Since: cosmos-sdk 0.45.2 + */ + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise; +} +export declare class ServiceClientImpl implements Service { + private readonly rpc; + constructor(rpc: Rpc); + simulate(request: SimulateRequest): Promise; + getTx(request: GetTxRequest): Promise; + broadcastTx(request: BroadcastTxRequest): Promise; + getTxsEvent(request: GetTxsEventRequest): Promise; + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + simulate(request: SimulateRequest): Promise; + getTx(request: GetTxRequest): Promise; + broadcastTx(request: BroadcastTxRequest): Promise; + getTxsEvent(request: GetTxsEventRequest): Promise; + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/tx/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/tx/v1beta1/tx.d.ts new file mode 100644 index 00000000..57914e9b --- /dev/null +++ b/packages/codegen/dist/cosmos/tx/v1beta1/tx.d.ts @@ -0,0 +1,455 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { SignMode } from "../signing/v1beta1/signing"; +import { CompactBitArray, CompactBitArraySDKType } from "../../crypto/multisig/v1beta1/multisig"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Tx is the standard type used for broadcasting transactions. 
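// --- Illustrative sketch, not part of the generated output ---
// Fetching a transaction by hash through the tx service LCD client declared above.
// The endpoint, hash and import paths are placeholders.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient as TxServiceLCDClient } from "./cosmos/tx/v1beta1/service.lcd";

async function fetchTx(restEndpoint: string, hash: string) {
  const requestClient = new LCDClient({ restEndpoint });
  const txService = new TxServiceLCDClient({ requestClient });
  // GetTxRequest.hash is a hex-encoded tx hash, per the service.d.ts comments above.
  return txService.getTx({ hash });
}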
*/ +export interface Tx { + /** body is the processable content of the transaction */ + body?: TxBody; + /** + * auth_info is the authorization related content of the transaction, + * specifically signers, signer modes and fee + */ + authInfo?: AuthInfo; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + signatures: Uint8Array[]; +} +/** Tx is the standard type used for broadcasting transactions. */ +export interface TxSDKType { + body?: TxBodySDKType; + auth_info?: AuthInfoSDKType; + signatures: Uint8Array[]; +} +/** + * TxRaw is a variant of Tx that pins the signer's exact binary representation + * of body and auth_info. This is used for signing, broadcasting and + * verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and + * the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used + * as the transaction ID. + */ +export interface TxRaw { + /** + * body_bytes is a protobuf serialization of a TxBody that matches the + * representation in SignDoc. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in SignDoc. + */ + authInfoBytes: Uint8Array; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + signatures: Uint8Array[]; +} +/** + * TxRaw is a variant of Tx that pins the signer's exact binary representation + * of body and auth_info. This is used for signing, broadcasting and + * verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and + * the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used + * as the transaction ID. + */ +export interface TxRawSDKType { + body_bytes: Uint8Array; + auth_info_bytes: Uint8Array; + signatures: Uint8Array[]; +} +/** SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. */ +export interface SignDoc { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in TxRaw. + */ + authInfoBytes: Uint8Array; + /** + * chain_id is the unique identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker + */ + chainId: string; + /** account_number is the account number of the account in state */ + accountNumber: Long; +} +/** SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. */ +export interface SignDocSDKType { + body_bytes: Uint8Array; + auth_info_bytes: Uint8Array; + chain_id: string; + account_number: Long; +} +/** + * SignDocDirectAux is the type used for generating sign bytes for + * SIGN_MODE_DIRECT_AUX. + * + * Since: cosmos-sdk 0.46 + */ +export interface SignDocDirectAux { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** public_key is the public key of the signing account. */ + publicKey?: Any; + /** + * chain_id is the identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker. 
+ */ + chainId: string; + /** account_number is the account number of the account in state. */ + accountNumber: Long; + /** sequence is the sequence number of the signing account. */ + sequence: Long; + /** + * Tip is the optional tip used for meta-transactions. It should be left + * empty if the signer is not the tipper for this transaction. + */ + tip?: Tip; +} +/** + * SignDocDirectAux is the type used for generating sign bytes for + * SIGN_MODE_DIRECT_AUX. + * + * Since: cosmos-sdk 0.46 + */ +export interface SignDocDirectAuxSDKType { + body_bytes: Uint8Array; + public_key?: AnySDKType; + chain_id: string; + account_number: Long; + sequence: Long; + tip?: TipSDKType; +} +/** TxBody is the body of a transaction that all signers sign over. */ +export interface TxBody { + /** + * messages is a list of messages to be executed. The required signers of + * those messages define the number and order of elements in AuthInfo's + * signer_infos and Tx's signatures. Each required signer address is added to + * the list only the first time it occurs. + * By convention, the first required signer (usually from the first message) + * is referred to as the primary signer and pays the fee for the whole + * transaction. + */ + messages: Any[]; + /** + * memo is any arbitrary note/comment to be added to the transaction. + * WARNING: in clients, any publicly exposed text should not be called memo, + * but should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122). + */ + memo: string; + /** + * timeout is the block height after which this transaction will not + * be processed by the chain + */ + timeoutHeight: Long; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, the transaction will be rejected + */ + extensionOptions: Any[]; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, they will be ignored + */ + nonCriticalExtensionOptions: Any[]; +} +/** TxBody is the body of a transaction that all signers sign over. */ +export interface TxBodySDKType { + messages: AnySDKType[]; + memo: string; + timeout_height: Long; + extension_options: AnySDKType[]; + non_critical_extension_options: AnySDKType[]; +} +/** + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. + */ +export interface AuthInfo { + /** + * signer_infos defines the signing modes for the required signers. The number + * and order of elements must match the required signers from TxBody's + * messages. The first element is the primary signer and the one which pays + * the fee. + */ + signerInfos: SignerInfo[]; + /** + * Fee is the fee and gas limit for the transaction. The first signer is the + * primary signer and the one which pays the fee. The fee can be calculated + * based on the cost of evaluating the body and doing signature verification + * of the signers. This can be estimated via simulation. + */ + fee?: Fee; + /** + * Tip is the optional tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ + tip?: Tip; +} +/** + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. 
+ */ +export interface AuthInfoSDKType { + signer_infos: SignerInfoSDKType[]; + fee?: FeeSDKType; + tip?: TipSDKType; +} +/** + * SignerInfo describes the public key and signing mode of a single top-level + * signer. + */ +export interface SignerInfo { + /** + * public_key is the public key of the signer. It is optional for accounts + * that already exist in state. If unset, the verifier can use the required \ + * signer address for this position and lookup the public key. + */ + publicKey?: Any; + /** + * mode_info describes the signing mode of the signer and is a nested + * structure to support nested multisig pubkey's + */ + modeInfo?: ModeInfo; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to + * prevent replay attacks. + */ + sequence: Long; +} +/** + * SignerInfo describes the public key and signing mode of a single top-level + * signer. + */ +export interface SignerInfoSDKType { + public_key?: AnySDKType; + mode_info?: ModeInfoSDKType; + sequence: Long; +} +/** ModeInfo describes the signing mode of a single or nested multisig signer. */ +export interface ModeInfo { + /** single represents a single signer */ + single?: ModeInfo_Single; + /** multi represents a nested multisig signer */ + multi?: ModeInfo_Multi; +} +/** ModeInfo describes the signing mode of a single or nested multisig signer. */ +export interface ModeInfoSDKType { + single?: ModeInfo_SingleSDKType; + multi?: ModeInfo_MultiSDKType; +} +/** + * Single is the mode info for a single signer. It is structured as a message + * to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + * future + */ +export interface ModeInfo_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; +} +/** + * Single is the mode info for a single signer. It is structured as a message + * to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + * future + */ +export interface ModeInfo_SingleSDKType { + mode: SignMode; +} +/** Multi is the mode info for a multisig public key */ +export interface ModeInfo_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray?: CompactBitArray; + /** + * mode_infos is the corresponding modes of the signers of the multisig + * which could include nested multisig public keys + */ + modeInfos: ModeInfo[]; +} +/** Multi is the mode info for a multisig public key */ +export interface ModeInfo_MultiSDKType { + bitarray?: CompactBitArraySDKType; + mode_infos: ModeInfoSDKType[]; +} +/** + * Fee includes the amount of coins paid in fees and the maximum + * gas to be used by the transaction. The ratio yields an effective "gasprice", + * which must be above some miminum to be accepted into the mempool. + */ +export interface Fee { + /** amount is the amount of coins to be paid as a fee */ + amount: Coin[]; + /** + * gas_limit is the maximum gas that can be used in transaction processing + * before an out of gas error occurs + */ + gasLimit: Long; + /** + * if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees. + * the payer must be a tx signer (and thus have signed this field in AuthInfo). + * setting this field does *not* change the ordering of required signers for the transaction. 
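// Usage sketch (illustrative, not part of the generated output): an AuthInfo for a single
// SIGN_MODE_DIRECT signer, mirroring the SignerInfo/ModeInfo/Fee shapes above. The SignMode
// import path, pubkey bytes, fee values and sequence are assumptions/placeholders.
import { Long } from "./helpers";
import { AuthInfo } from "./cosmos/tx/v1beta1/tx";
import { SignMode } from "./cosmos/tx/signing/v1beta1/signing";

declare const encodedPubKey: Uint8Array; // protobuf-encoded secp256k1 PubKey

const authInfo = AuthInfo.fromPartial({
  signerInfos: [{
    publicKey: { typeUrl: "/cosmos.crypto.secp256k1.PubKey", value: encodedPubKey },
    modeInfo: { single: { mode: SignMode.SIGN_MODE_DIRECT } },
    sequence: Long.fromNumber(7), // committed tx count for this account (replay protection)
  }],
  fee: {
    amount: [{ denom: "uatom", amount: "500" }],
    gasLimit: Long.fromNumber(200000),
  },
});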
+ */ + payer: string; + /** + * if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used + * to pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does + * not support fee grants, this will fail + */ + granter: string; +} +/** + * Fee includes the amount of coins paid in fees and the maximum + * gas to be used by the transaction. The ratio yields an effective "gasprice", + * which must be above some miminum to be accepted into the mempool. + */ +export interface FeeSDKType { + amount: CoinSDKType[]; + gas_limit: Long; + payer: string; + granter: string; +} +/** + * Tip is the tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ +export interface Tip { + /** amount is the amount of the tip */ + amount: Coin[]; + /** tipper is the address of the account paying for the tip */ + tipper: string; +} +/** + * Tip is the tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ +export interface TipSDKType { + amount: CoinSDKType[]; + tipper: string; +} +/** + * AuxSignerData is the intermediary format that an auxiliary signer (e.g. a + * tipper) builds and sends to the fee payer (who will build and broadcast the + * actual tx). AuxSignerData is not a valid tx in itself, and will be rejected + * by the node if sent directly as-is. + * + * Since: cosmos-sdk 0.46 + */ +export interface AuxSignerData { + /** + * address is the bech32-encoded address of the auxiliary signer. If using + * AuxSignerData across different chains, the bech32 prefix of the target + * chain (where the final transaction is broadcasted) should be used. + */ + address: string; + /** + * sign_doc is the SIGN_MOD_DIRECT_AUX sign doc that the auxiliary signer + * signs. Note: we use the same sign doc even if we're signing with + * LEGACY_AMINO_JSON. + */ + signDoc?: SignDocDirectAux; + /** mode is the signing mode of the single signer */ + mode: SignMode; + /** sig is the signature of the sign doc. */ + sig: Uint8Array; +} +/** + * AuxSignerData is the intermediary format that an auxiliary signer (e.g. a + * tipper) builds and sends to the fee payer (who will build and broadcast the + * actual tx). AuxSignerData is not a valid tx in itself, and will be rejected + * by the node if sent directly as-is. 
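// Usage sketch (illustrative, not part of the generated output): the aux-signer flow
// described above — a tipper fills a SignDocDirectAux, signs its serialization and hands
// the fee payer an AuxSignerData. `signBytes`, the chain id, amounts and addresses are
// assumptions/placeholders of this example.
import { Long } from "./helpers";
import { SignDocDirectAux, AuxSignerData } from "./cosmos/tx/v1beta1/tx";
import { SignMode } from "./cosmos/tx/signing/v1beta1/signing";

declare function signBytes(bytes: Uint8Array): Uint8Array; // hypothetical secp256k1 signer
declare const bodyBytes: Uint8Array;                       // TxBody bytes shared with the fee payer

const signDoc = SignDocDirectAux.fromPartial({
  bodyBytes,
  chainId: "my-chain-1",
  accountNumber: Long.fromNumber(1234),
  sequence: Long.fromNumber(7),
  tip: { amount: [{ denom: "uatom", amount: "100" }], tipper: "cosmos1tipper..." },
});

const aux = AuxSignerData.fromPartial({
  address: "cosmos1tipper...",
  signDoc,
  mode: SignMode.SIGN_MODE_DIRECT_AUX,
  sig: signBytes(SignDocDirectAux.encode(signDoc).finish()),
});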
+ * + * Since: cosmos-sdk 0.46 + */ +export interface AuxSignerDataSDKType { + address: string; + sign_doc?: SignDocDirectAuxSDKType; + mode: SignMode; + sig: Uint8Array; +} +export declare const Tx: { + encode(message: Tx, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Tx; + fromPartial(object: DeepPartial): Tx; +}; +export declare const TxRaw: { + encode(message: TxRaw, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxRaw; + fromPartial(object: DeepPartial): TxRaw; +}; +export declare const SignDoc: { + encode(message: SignDoc, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignDoc; + fromPartial(object: DeepPartial): SignDoc; +}; +export declare const SignDocDirectAux: { + encode(message: SignDocDirectAux, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignDocDirectAux; + fromPartial(object: DeepPartial): SignDocDirectAux; +}; +export declare const TxBody: { + encode(message: TxBody, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxBody; + fromPartial(object: DeepPartial): TxBody; +}; +export declare const AuthInfo: { + encode(message: AuthInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AuthInfo; + fromPartial(object: DeepPartial): AuthInfo; +}; +export declare const SignerInfo: { + encode(message: SignerInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignerInfo; + fromPartial(object: DeepPartial): SignerInfo; +}; +export declare const ModeInfo: { + encode(message: ModeInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo; + fromPartial(object: DeepPartial): ModeInfo; +}; +export declare const ModeInfo_Single: { + encode(message: ModeInfo_Single, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Single; + fromPartial(object: DeepPartial): ModeInfo_Single; +}; +export declare const ModeInfo_Multi: { + encode(message: ModeInfo_Multi, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Multi; + fromPartial(object: DeepPartial): ModeInfo_Multi; +}; +export declare const Fee: { + encode(message: Fee, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Fee; + fromPartial(object: DeepPartial): Fee; +}; +export declare const Tip: { + encode(message: Tip, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Tip; + fromPartial(object: DeepPartial): Tip; +}; +export declare const AuxSignerData: { + encode(message: AuxSignerData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AuxSignerData; + fromPartial(object: DeepPartial): AuxSignerData; +}; diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/query.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.d.ts new file mode 100644 index 00000000..1de684a7 --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.d.ts @@ -0,0 +1,220 @@ +/// +import { Plan, PlanSDKType, ModuleVersion, ModuleVersionSDKType } from "./upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC + * method. 
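// Usage sketch (illustrative, not part of the generated output): every message above is
// exported as a codec object with encode/decode/fromPartial. Decoding raw transaction bytes
// (for example one entry of a block's tx list) back into a Tx; the source of `txBytes` is
// an assumption of this example.
import { Tx } from "./cosmos/tx/v1beta1/tx";

declare const txBytes: Uint8Array; // e.g. obtained from a Tendermint RPC block response

const tx = Tx.decode(txBytes);
console.log(tx.body?.memo, tx.authInfo?.fee?.gasLimit.toString());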
+ */ +export interface QueryCurrentPlanRequest { +} +/** + * QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC + * method. + */ +export interface QueryCurrentPlanRequestSDKType { +} +/** + * QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC + * method. + */ +export interface QueryCurrentPlanResponse { + /** plan is the current upgrade plan. */ + plan?: Plan; +} +/** + * QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC + * method. + */ +export interface QueryCurrentPlanResponseSDKType { + plan?: PlanSDKType; +} +/** + * QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanRequest { + /** name is the name of the applied plan to query for. */ + name: string; +} +/** + * QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanRequestSDKType { + name: string; +} +/** + * QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanResponse { + /** height is the block height at which the plan was applied. */ + height: Long; +} +/** + * QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanResponseSDKType { + height: Long; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState + * RPC method. + */ +/** @deprecated */ +export interface QueryUpgradedConsensusStateRequest { + /** + * last height of the current chain must be sent in request + * as this is the height under which next consensus state is stored + */ + lastHeight: Long; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState + * RPC method. + */ +/** @deprecated */ +export interface QueryUpgradedConsensusStateRequestSDKType { + last_height: Long; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState + * RPC method. + */ +/** @deprecated */ +export interface QueryUpgradedConsensusStateResponse { + /** Since: cosmos-sdk 0.43 */ + upgradedConsensusState: Uint8Array; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState + * RPC method. + */ +/** @deprecated */ +export interface QueryUpgradedConsensusStateResponseSDKType { + upgraded_consensus_state: Uint8Array; +} +/** + * QueryModuleVersionsRequest is the request type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsRequest { + /** + * module_name is a field to query a specific module + * consensus version from state. Leaving this empty will + * fetch the full list of module versions from state + */ + moduleName: string; +} +/** + * QueryModuleVersionsRequest is the request type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsRequestSDKType { + module_name: string; +} +/** + * QueryModuleVersionsResponse is the response type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsResponse { + /** module_versions is a list of module names with their consensus versions. */ + moduleVersions: ModuleVersion[]; +} +/** + * QueryModuleVersionsResponse is the response type for the Query/ModuleVersions + * RPC method. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsResponseSDKType { + module_versions: ModuleVersionSDKType[]; +} +/** + * QueryAuthorityRequest is the request type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityRequest { +} +/** + * QueryAuthorityRequest is the request type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityRequestSDKType { +} +/** + * QueryAuthorityResponse is the response type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityResponse { + address: string; +} +/** + * QueryAuthorityResponse is the response type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityResponseSDKType { + address: string; +} +export declare const QueryCurrentPlanRequest: { + encode(_: QueryCurrentPlanRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanRequest; + fromPartial(_: DeepPartial): QueryCurrentPlanRequest; +}; +export declare const QueryCurrentPlanResponse: { + encode(message: QueryCurrentPlanResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanResponse; + fromPartial(object: DeepPartial): QueryCurrentPlanResponse; +}; +export declare const QueryAppliedPlanRequest: { + encode(message: QueryAppliedPlanRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanRequest; + fromPartial(object: DeepPartial): QueryAppliedPlanRequest; +}; +export declare const QueryAppliedPlanResponse: { + encode(message: QueryAppliedPlanResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanResponse; + fromPartial(object: DeepPartial): QueryAppliedPlanResponse; +}; +export declare const QueryUpgradedConsensusStateRequest: { + encode(message: QueryUpgradedConsensusStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateRequest; + fromPartial(object: DeepPartial): QueryUpgradedConsensusStateRequest; +}; +export declare const QueryUpgradedConsensusStateResponse: { + encode(message: QueryUpgradedConsensusStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateResponse; + fromPartial(object: DeepPartial): QueryUpgradedConsensusStateResponse; +}; +export declare const QueryModuleVersionsRequest: { + encode(message: QueryModuleVersionsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsRequest; + fromPartial(object: DeepPartial): QueryModuleVersionsRequest; +}; +export declare const QueryModuleVersionsResponse: { + encode(message: QueryModuleVersionsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsResponse; + fromPartial(object: DeepPartial): QueryModuleVersionsResponse; +}; +export declare const QueryAuthorityRequest: { + encode(_: QueryAuthorityRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAuthorityRequest; + fromPartial(_: DeepPartial): QueryAuthorityRequest; +}; +export declare const QueryAuthorityResponse: { + encode(message: QueryAuthorityResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: 
number): QueryAuthorityResponse; + fromPartial(object: DeepPartial): QueryAuthorityResponse; +}; diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/query.lcd.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.lcd.d.ts new file mode 100644 index 00000000..2096c970 --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.lcd.d.ts @@ -0,0 +1,13 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryCurrentPlanRequest, QueryCurrentPlanResponseSDKType, QueryAppliedPlanRequest, QueryAppliedPlanResponseSDKType, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponseSDKType, QueryModuleVersionsRequest, QueryModuleVersionsResponseSDKType, QueryAuthorityRequest, QueryAuthorityResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + currentPlan(_params?: QueryCurrentPlanRequest): Promise; + appliedPlan(params: QueryAppliedPlanRequest): Promise; + upgradedConsensusState(params: QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(params: QueryModuleVersionsRequest): Promise; + authority(_params?: QueryAuthorityRequest): Promise; +} diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.rpc.Query.d.ts new file mode 100644 index 00000000..f12df5a9 --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/query.rpc.Query.d.ts @@ -0,0 +1,43 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryCurrentPlanRequest, QueryCurrentPlanResponse, QueryAppliedPlanRequest, QueryAppliedPlanResponse, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponse, QueryModuleVersionsRequest, QueryModuleVersionsResponse, QueryAuthorityRequest, QueryAuthorityResponse } from "./query"; +/** Query defines the gRPC upgrade querier service. */ +export interface Query { + /** CurrentPlan queries the current upgrade plan. */ + currentPlan(request?: QueryCurrentPlanRequest): Promise; + /** AppliedPlan queries a previously applied upgrade plan by its name. */ + appliedPlan(request: QueryAppliedPlanRequest): Promise; + /** + * UpgradedConsensusState queries the consensus state that will serve + * as a trusted kernel for the next version of this chain. It will only be + * stored at the last height of this chain. + * UpgradedConsensusState RPC not supported with legacy querier + * This rpc is deprecated now that IBC has its own replacement + * (https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) + */ + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise; + /** + * ModuleVersions queries the list of module versions from state. 
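// Usage sketch (illustrative, not part of the generated output): querying the upgrade module
// over REST via the generated LCD client. The `restEndpoint` constructor option of
// @osmonauts/lcd's LCDClient is an assumption of this example.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./cosmos/upgrade/v1beta1/query.lcd";

async function appliedPlanHeight(restEndpoint: string, name: string) {
  const requestClient = new LCDClient({ restEndpoint });
  const queryClient = new LCDQueryClient({ requestClient });
  const { height } = await queryClient.appliedPlan({ name });
  return height; // block height at which the named plan was applied
}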
+ * + * Since: cosmos-sdk 0.43 + */ + moduleVersions(request: QueryModuleVersionsRequest): Promise; + /** Returns the account with authority to conduct upgrades */ + authority(request?: QueryAuthorityRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + currentPlan(request?: QueryCurrentPlanRequest): Promise; + appliedPlan(request: QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: QueryModuleVersionsRequest): Promise; + authority(request?: QueryAuthorityRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + currentPlan(request?: QueryCurrentPlanRequest): Promise; + appliedPlan(request: QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: QueryModuleVersionsRequest): Promise; + authority(request?: QueryAuthorityRequest): Promise; +}; diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.d.ts new file mode 100644 index 00000000..c136e51b --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.d.ts @@ -0,0 +1,88 @@ +import { Plan, PlanSDKType } from "./upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgrade { + /** authority is the address of the governance account. */ + authority: string; + /** plan is the upgrade plan. */ + plan?: Plan; +} +/** + * MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgradeSDKType { + authority: string; + plan?: PlanSDKType; +} +/** + * MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgradeResponse { +} +/** + * MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgradeResponseSDKType { +} +/** + * MsgCancelUpgrade is the Msg/CancelUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgrade { + /** authority is the address of the governance account. */ + authority: string; +} +/** + * MsgCancelUpgrade is the Msg/CancelUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgradeSDKType { + authority: string; +} +/** + * MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgradeResponse { +} +/** + * MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. 
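// Usage sketch (illustrative, not part of the generated output): wiring the generated
// upgrade query extension into @cosmjs/stargate's QueryClient. The RPC endpoint is a
// placeholder; import paths follow this package's dist layout.
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./cosmos/upgrade/v1beta1/query.rpc.Query";

async function currentUpgradePlan(rpcEndpoint: string) {
  const tmClient = await Tendermint34Client.connect(rpcEndpoint);
  const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
  const { plan } = await client.currentPlan();
  return plan; // undefined when no upgrade is scheduled
}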
+ * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgradeResponseSDKType { +} +export declare const MsgSoftwareUpgrade: { + encode(message: MsgSoftwareUpgrade, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgrade; + fromPartial(object: DeepPartial): MsgSoftwareUpgrade; +}; +export declare const MsgSoftwareUpgradeResponse: { + encode(_: MsgSoftwareUpgradeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgradeResponse; + fromPartial(_: DeepPartial): MsgSoftwareUpgradeResponse; +}; +export declare const MsgCancelUpgrade: { + encode(message: MsgCancelUpgrade, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgrade; + fromPartial(object: DeepPartial): MsgCancelUpgrade; +}; +export declare const MsgCancelUpgradeResponse: { + encode(_: MsgCancelUpgradeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgradeResponse; + fromPartial(_: DeepPartial): MsgCancelUpgradeResponse; +}; diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..66d0b9a7 --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../helpers"; +import { MsgSoftwareUpgrade, MsgSoftwareUpgradeResponse, MsgCancelUpgrade, MsgCancelUpgradeResponse } from "./tx"; +/** Msg defines the upgrade Msg service. */ +export interface Msg { + /** + * SoftwareUpgrade is a governance operation for initiating a software upgrade. + * + * Since: cosmos-sdk 0.46 + */ + softwareUpgrade(request: MsgSoftwareUpgrade): Promise; + /** + * CancelUpgrade is a governance operation for cancelling a previously + * approvid software upgrade. + * + * Since: cosmos-sdk 0.46 + */ + cancelUpgrade(request: MsgCancelUpgrade): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + softwareUpgrade(request: MsgSoftwareUpgrade): Promise; + cancelUpgrade(request: MsgCancelUpgrade): Promise; +} diff --git a/packages/codegen/dist/cosmos/upgrade/v1beta1/upgrade.d.ts b/packages/codegen/dist/cosmos/upgrade/v1beta1/upgrade.d.ts new file mode 100644 index 00000000..efd09eb7 --- /dev/null +++ b/packages/codegen/dist/cosmos/upgrade/v1beta1/upgrade.d.ts @@ -0,0 +1,137 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name: string; + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. 
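// Usage sketch (illustrative, not part of the generated output): MsgSoftwareUpgrade is a
// governance operation, so it is typically wrapped in an Any and submitted inside a gov
// proposal rather than broadcast directly; only the Any-packing step is shown here. The
// authority address and plan values are placeholders.
import { Long } from "./helpers";
import { MsgSoftwareUpgrade } from "./cosmos/upgrade/v1beta1/tx";

const msg = MsgSoftwareUpgrade.fromPartial({
  authority: "cosmos1...gov-module-account...", // placeholder
  plan: { name: "v2-upgrade", height: Long.fromNumber(1000000), info: "" },
});

const anyMsg = {
  typeUrl: "/cosmos.upgrade.v1beta1.MsgSoftwareUpgrade",
  value: MsgSoftwareUpgrade.encode(msg).finish(),
};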
+ */ + /** @deprecated */ + time?: Date; + /** + * The height at which the upgrade must be performed. + * Only used if Time is not set. + */ + height: Long; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + */ + /** @deprecated */ + upgradedClientState?: Any; +} +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface PlanSDKType { + name: string; + /** @deprecated */ + time?: Date; + height: Long; + info: string; + /** @deprecated */ + upgraded_client_state?: AnySDKType; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ +/** @deprecated */ +export interface SoftwareUpgradeProposal { + title: string; + description: string; + plan?: Plan; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ +/** @deprecated */ +export interface SoftwareUpgradeProposalSDKType { + title: string; + description: string; + plan?: PlanSDKType; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ +/** @deprecated */ +export interface CancelSoftwareUpgradeProposal { + title: string; + description: string; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ +/** @deprecated */ +export interface CancelSoftwareUpgradeProposalSDKType { + title: string; + description: string; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersion { + /** name of the app module */ + name: string; + /** consensus version of the app module */ + version: Long; +} +/** + * ModuleVersion specifies a module and its consensus version. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersionSDKType { + name: string; + version: Long; +} +export declare const Plan: { + encode(message: Plan, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Plan; + fromPartial(object: DeepPartial): Plan; +}; +export declare const SoftwareUpgradeProposal: { + encode(message: SoftwareUpgradeProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SoftwareUpgradeProposal; + fromPartial(object: DeepPartial): SoftwareUpgradeProposal; +}; +export declare const CancelSoftwareUpgradeProposal: { + encode(message: CancelSoftwareUpgradeProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CancelSoftwareUpgradeProposal; + fromPartial(object: DeepPartial): CancelSoftwareUpgradeProposal; +}; +export declare const ModuleVersion: { + encode(message: ModuleVersion, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleVersion; + fromPartial(object: DeepPartial): ModuleVersion; +}; diff --git a/packages/codegen/dist/cosmos/vesting/v1beta1/tx.d.ts b/packages/codegen/dist/cosmos/vesting/v1beta1/tx.d.ts new file mode 100644 index 00000000..14951d29 --- /dev/null +++ b/packages/codegen/dist/cosmos/vesting/v1beta1/tx.d.ts @@ -0,0 +1,119 @@ +/// +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Period, PeriodSDKType } from "./vesting"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ +export interface MsgCreateVestingAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; + endTime: Long; + delayed: boolean; +} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ +export interface MsgCreateVestingAccountSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; + end_time: Long; + delayed: boolean; +} +/** MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. */ +export interface MsgCreateVestingAccountResponse { +} +/** MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. */ +export interface MsgCreateVestingAccountResponseSDKType { +} +/** + * MsgCreatePermanentLockedAccount defines a message that enables creating a permanent + * locked account. + */ +export interface MsgCreatePermanentLockedAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; +} +/** + * MsgCreatePermanentLockedAccount defines a message that enables creating a permanent + * locked account. + */ +export interface MsgCreatePermanentLockedAccountSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; +} +/** MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. */ +export interface MsgCreatePermanentLockedAccountResponse { +} +/** MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. */ +export interface MsgCreatePermanentLockedAccountResponseSDKType { +} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. 
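// Usage sketch (illustrative, not part of the generated output): building a
// MsgCreateVestingAccount. end_time is a unix timestamp in seconds; delayed=false vests
// linearly until end_time (continuous vesting) while delayed=true releases everything at
// end_time. Addresses and amounts are placeholders.
import { Long } from "./helpers";
import { MsgCreateVestingAccount } from "./cosmos/vesting/v1beta1/tx";

const oneYear = 365 * 24 * 3600;
const msg = MsgCreateVestingAccount.fromPartial({
  fromAddress: "cosmos1funder...",
  toAddress: "cosmos1beneficiary...",
  amount: [{ denom: "uatom", amount: "1000000" }],
  endTime: Long.fromNumber(Math.floor(Date.now() / 1000) + oneYear),
  delayed: false,
});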
+ */ +export interface MsgCreatePeriodicVestingAccount { + fromAddress: string; + toAddress: string; + startTime: Long; + vestingPeriods: Period[]; +} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ +export interface MsgCreatePeriodicVestingAccountSDKType { + from_address: string; + to_address: string; + start_time: Long; + vesting_periods: PeriodSDKType[]; +} +/** + * MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount + * response type. + */ +export interface MsgCreatePeriodicVestingAccountResponse { +} +/** + * MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount + * response type. + */ +export interface MsgCreatePeriodicVestingAccountResponseSDKType { +} +export declare const MsgCreateVestingAccount: { + encode(message: MsgCreateVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccount; + fromPartial(object: DeepPartial): MsgCreateVestingAccount; +}; +export declare const MsgCreateVestingAccountResponse: { + encode(_: MsgCreateVestingAccountResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccountResponse; + fromPartial(_: DeepPartial): MsgCreateVestingAccountResponse; +}; +export declare const MsgCreatePermanentLockedAccount: { + encode(message: MsgCreatePermanentLockedAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccount; + fromPartial(object: DeepPartial): MsgCreatePermanentLockedAccount; +}; +export declare const MsgCreatePermanentLockedAccountResponse: { + encode(_: MsgCreatePermanentLockedAccountResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccountResponse; + fromPartial(_: DeepPartial): MsgCreatePermanentLockedAccountResponse; +}; +export declare const MsgCreatePeriodicVestingAccount: { + encode(message: MsgCreatePeriodicVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccount; + fromPartial(object: DeepPartial): MsgCreatePeriodicVestingAccount; +}; +export declare const MsgCreatePeriodicVestingAccountResponse: { + encode(_: MsgCreatePeriodicVestingAccountResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccountResponse; + fromPartial(_: DeepPartial): MsgCreatePeriodicVestingAccountResponse; +}; diff --git a/packages/codegen/dist/cosmos/vesting/v1beta1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmos/vesting/v1beta1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..9999aa89 --- /dev/null +++ b/packages/codegen/dist/cosmos/vesting/v1beta1/tx.rpc.msg.d.ts @@ -0,0 +1,27 @@ +import { Rpc } from "../../../helpers"; +import { MsgCreateVestingAccount, MsgCreateVestingAccountResponse, MsgCreatePermanentLockedAccount, MsgCreatePermanentLockedAccountResponse, MsgCreatePeriodicVestingAccount, MsgCreatePeriodicVestingAccountResponse } from "./tx"; +/** Msg defines the bank Msg service. */ +export interface Msg { + /** + * CreateVestingAccount defines a method that enables creating a vesting + * account. + */ + createVestingAccount(request: MsgCreateVestingAccount): Promise; + /** + * CreatePermanentLockedAccount defines a method that enables creating a permanent + * locked account. 
+ */ + createPermanentLockedAccount(request: MsgCreatePermanentLockedAccount): Promise; + /** + * CreatePeriodicVestingAccount defines a method that enables creating a + * periodic vesting account. + */ + createPeriodicVestingAccount(request: MsgCreatePeriodicVestingAccount): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + createVestingAccount(request: MsgCreateVestingAccount): Promise; + createPermanentLockedAccount(request: MsgCreatePermanentLockedAccount): Promise; + createPeriodicVestingAccount(request: MsgCreatePeriodicVestingAccount): Promise; +} diff --git a/packages/codegen/dist/cosmos/vesting/v1beta1/vesting.d.ts b/packages/codegen/dist/cosmos/vesting/v1beta1/vesting.d.ts new file mode 100644 index 00000000..a0c7856e --- /dev/null +++ b/packages/codegen/dist/cosmos/vesting/v1beta1/vesting.d.ts @@ -0,0 +1,137 @@ +/// +import { BaseAccount, BaseAccountSDKType } from "../../auth/v1beta1/auth"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * BaseVestingAccount implements the VestingAccount interface. It contains all + * the necessary fields needed for any vesting account implementation. + */ +export interface BaseVestingAccount { + baseAccount?: BaseAccount; + originalVesting: Coin[]; + delegatedFree: Coin[]; + delegatedVesting: Coin[]; + endTime: Long; +} +/** + * BaseVestingAccount implements the VestingAccount interface. It contains all + * the necessary fields needed for any vesting account implementation. + */ +export interface BaseVestingAccountSDKType { + base_account?: BaseAccountSDKType; + original_vesting: CoinSDKType[]; + delegated_free: CoinSDKType[]; + delegated_vesting: CoinSDKType[]; + end_time: Long; +} +/** + * ContinuousVestingAccount implements the VestingAccount interface. It + * continuously vests by unlocking coins linearly with respect to time. + */ +export interface ContinuousVestingAccount { + baseVestingAccount?: BaseVestingAccount; + startTime: Long; +} +/** + * ContinuousVestingAccount implements the VestingAccount interface. It + * continuously vests by unlocking coins linearly with respect to time. + */ +export interface ContinuousVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; + start_time: Long; +} +/** + * DelayedVestingAccount implements the VestingAccount interface. It vests all + * coins after a specific time, but non prior. In other words, it keeps them + * locked until a specified time. + */ +export interface DelayedVestingAccount { + baseVestingAccount?: BaseVestingAccount; +} +/** + * DelayedVestingAccount implements the VestingAccount interface. It vests all + * coins after a specific time, but non prior. In other words, it keeps them + * locked until a specified time. + */ +export interface DelayedVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; +} +/** Period defines a length of time and amount of coins that will vest. */ +export interface Period { + length: Long; + amount: Coin[]; +} +/** Period defines a length of time and amount of coins that will vest. */ +export interface PeriodSDKType { + length: Long; + amount: CoinSDKType[]; +} +/** + * PeriodicVestingAccount implements the VestingAccount interface. It + * periodically vests by unlocking coins during each specified period. 
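// Usage sketch (illustrative, not part of the generated output): vesting periods for a
// periodic vesting account. Each Period.length is a duration in seconds applied
// sequentially from start_time, and the period amounts add up to the total vested amount.
// Values are placeholders.
import { Long } from "./helpers";
import { MsgCreatePeriodicVestingAccount } from "./cosmos/vesting/v1beta1/tx";

const month = 30 * 24 * 3600;
const msg = MsgCreatePeriodicVestingAccount.fromPartial({
  fromAddress: "cosmos1funder...",
  toAddress: "cosmos1beneficiary...",
  startTime: Long.fromNumber(Math.floor(Date.now() / 1000)),
  vestingPeriods: [
    { length: Long.fromNumber(month), amount: [{ denom: "uatom", amount: "500000" }] },
    { length: Long.fromNumber(month), amount: [{ denom: "uatom", amount: "500000" }] },
  ],
});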
+ */ +export interface PeriodicVestingAccount { + baseVestingAccount?: BaseVestingAccount; + startTime: Long; + vestingPeriods: Period[]; +} +/** + * PeriodicVestingAccount implements the VestingAccount interface. It + * periodically vests by unlocking coins during each specified period. + */ +export interface PeriodicVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; + start_time: Long; + vesting_periods: PeriodSDKType[]; +} +/** + * PermanentLockedAccount implements the VestingAccount interface. It does + * not ever release coins, locking them indefinitely. Coins in this account can + * still be used for delegating and for governance votes even while locked. + * + * Since: cosmos-sdk 0.43 + */ +export interface PermanentLockedAccount { + baseVestingAccount?: BaseVestingAccount; +} +/** + * PermanentLockedAccount implements the VestingAccount interface. It does + * not ever release coins, locking them indefinitely. Coins in this account can + * still be used for delegating and for governance votes even while locked. + * + * Since: cosmos-sdk 0.43 + */ +export interface PermanentLockedAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; +} +export declare const BaseVestingAccount: { + encode(message: BaseVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BaseVestingAccount; + fromPartial(object: DeepPartial): BaseVestingAccount; +}; +export declare const ContinuousVestingAccount: { + encode(message: ContinuousVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContinuousVestingAccount; + fromPartial(object: DeepPartial): ContinuousVestingAccount; +}; +export declare const DelayedVestingAccount: { + encode(message: DelayedVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DelayedVestingAccount; + fromPartial(object: DeepPartial): DelayedVestingAccount; +}; +export declare const Period: { + encode(message: Period, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Period; + fromPartial(object: DeepPartial): Period; +}; +export declare const PeriodicVestingAccount: { + encode(message: PeriodicVestingAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PeriodicVestingAccount; + fromPartial(object: DeepPartial): PeriodicVestingAccount; +}; +export declare const PermanentLockedAccount: { + encode(message: PermanentLockedAccount, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PermanentLockedAccount; + fromPartial(object: DeepPartial): PermanentLockedAccount; +}; diff --git a/packages/codegen/dist/cosmos_proto/bundle.d.ts b/packages/codegen/dist/cosmos_proto/bundle.d.ts new file mode 100644 index 00000000..1f962e88 --- /dev/null +++ b/packages/codegen/dist/cosmos_proto/bundle.d.ts @@ -0,0 +1,24 @@ +import * as _2 from "./cosmos"; +export declare const cosmos_proto: { + scalarTypeFromJSON(object: any): _2.ScalarType; + scalarTypeToJSON(object: _2.ScalarType): string; + ScalarType: typeof _2.ScalarType; + ScalarTypeSDKType: typeof _2.ScalarType; + InterfaceDescriptor: { + encode(message: _2.InterfaceDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _2.InterfaceDescriptor; + fromPartial(object: { + name?: string; + description?: string; + }): 
_2.InterfaceDescriptor; + }; + ScalarDescriptor: { + encode(message: _2.ScalarDescriptor, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _2.ScalarDescriptor; + fromPartial(object: { + name?: string; + description?: string; + fieldType?: _2.ScalarType[]; + }): _2.ScalarDescriptor; + }; +}; diff --git a/packages/codegen/dist/cosmos_proto/cosmos.d.ts b/packages/codegen/dist/cosmos_proto/cosmos.d.ts new file mode 100644 index 00000000..c53b3d44 --- /dev/null +++ b/packages/codegen/dist/cosmos_proto/cosmos.d.ts @@ -0,0 +1,92 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../helpers"; +export declare enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1 +} +export declare const ScalarTypeSDKType: typeof ScalarType; +export declare function scalarTypeFromJSON(object: any): ScalarType; +export declare function scalarTypeToJSON(object: ScalarType): string; +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptorSDKType { + name: string; + description: string; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. 
+ * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptorSDKType { + name: string; + description: string; + field_type: ScalarType[]; +} +export declare const InterfaceDescriptor: { + encode(message: InterfaceDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor; + fromPartial(object: DeepPartial): InterfaceDescriptor; +}; +export declare const ScalarDescriptor: { + encode(message: ScalarDescriptor, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor; + fromPartial(object: DeepPartial): ScalarDescriptor; +}; diff --git a/packages/codegen/dist/cosmwasm/bundle.d.ts b/packages/codegen/dist/cosmwasm/bundle.d.ts new file mode 100644 index 00000000..14b8169e --- /dev/null +++ b/packages/codegen/dist/cosmwasm/bundle.d.ts @@ -0,0 +1,1284 @@ +/// +import * as _95 from "./wasm/v1/authz"; +import * as _96 from "./wasm/v1/genesis"; +import * as _97 from "./wasm/v1/ibc"; +import * as _98 from "./wasm/v1/proposal"; +import * as _99 from "./wasm/v1/query"; +import * as _100 from "./wasm/v1/tx"; +import * as _101 from "./wasm/v1/types"; +import * as _194 from "./wasm/v1/query.lcd"; +import * as _195 from "./wasm/v1/query.rpc.Query"; +import * as _196 from "./wasm/v1/tx.rpc.msg"; +export declare namespace cosmwasm { + namespace wasm { + const v1: { + MsgClientImpl: typeof _196.MsgClientImpl; + QueryClientImpl: typeof _195.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + contractInfo(request: _99.QueryContractInfoRequest): Promise<_99.QueryContractInfoResponse>; + contractHistory(request: _99.QueryContractHistoryRequest): Promise<_99.QueryContractHistoryResponse>; + contractsByCode(request: _99.QueryContractsByCodeRequest): Promise<_99.QueryContractsByCodeResponse>; + allContractState(request: _99.QueryAllContractStateRequest): Promise<_99.QueryAllContractStateResponse>; + rawContractState(request: _99.QueryRawContractStateRequest): Promise<_99.QueryRawContractStateResponse>; + smartContractState(request: _99.QuerySmartContractStateRequest): Promise<_99.QuerySmartContractStateResponse>; + code(request: _99.QueryCodeRequest): Promise<_99.QueryCodeResponse>; + codes(request?: _99.QueryCodesRequest): Promise<_99.QueryCodesResponse>; + pinnedCodes(request?: _99.QueryPinnedCodesRequest): Promise<_99.QueryPinnedCodesResponse>; + params(request?: _99.QueryParamsRequest): Promise<_99.QueryParamsResponse>; + contractsByCreator(request: _99.QueryContractsByCreatorRequest): Promise<_99.QueryContractsByCreatorResponse>; + }; + LCDQueryClient: typeof _194.LCDQueryClient; + accessTypeFromJSON(object: any): _101.AccessType; + accessTypeToJSON(object: _101.AccessType): string; + contractCodeHistoryOperationTypeFromJSON(object: any): _101.ContractCodeHistoryOperationType; + contractCodeHistoryOperationTypeToJSON(object: _101.ContractCodeHistoryOperationType): string; + AccessType: typeof _101.AccessType; + AccessTypeSDKType: typeof _101.AccessType; + ContractCodeHistoryOperationType: typeof _101.ContractCodeHistoryOperationType; + ContractCodeHistoryOperationTypeSDKType: typeof _101.ContractCodeHistoryOperationType; + AccessTypeParam: { + encode(message: _101.AccessTypeParam, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | 
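// Usage sketch (illustrative, not part of the generated output): calling a CosmWasm smart
// query through the generated extension. The query payload is contract-specific JSON
// encoded as UTF-8 bytes; the cw20-style `balance` query, endpoint and addresses are
// assumptions of this example.
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { toUtf8, fromUtf8 } from "@cosmjs/encoding";
import { cosmwasm } from "./cosmwasm/bundle";

async function smartQueryBalance(rpcEndpoint: string, contract: string, owner: string) {
  const tmClient = await Tendermint34Client.connect(rpcEndpoint);
  const client = QueryClient.withExtensions(tmClient, cosmwasm.wasm.v1.createRpcQueryExtension);
  const { data } = await client.smartContractState({
    address: contract,
    queryData: toUtf8(JSON.stringify({ balance: { address: owner } })),
  });
  return JSON.parse(fromUtf8(data)); // e.g. { balance: "12345" }
}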
import("protobufjs").Reader, length?: number): _101.AccessTypeParam; + fromPartial(object: { + value?: _101.AccessType; + }): _101.AccessTypeParam; + }; + AccessConfig: { + encode(message: _101.AccessConfig, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.AccessConfig; + fromPartial(object: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }): _101.AccessConfig; + }; + Params: { + encode(message: _101.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.Params; + fromPartial(object: { + codeUploadAccess?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + instantiateDefaultPermission?: _101.AccessType; + }): _101.Params; + }; + CodeInfo: { + encode(message: _101.CodeInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.CodeInfo; + fromPartial(object: { + codeHash?: Uint8Array; + creator?: string; + instantiateConfig?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }): _101.CodeInfo; + }; + ContractInfo: { + encode(message: _101.ContractInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.ContractInfo; + fromPartial(object: { + codeId?: string | number | import("long").Long; + creator?: string; + admin?: string; + label?: string; + created?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + ibcPortId?: string; + extension?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _101.ContractInfo; + }; + ContractCodeHistoryEntry: { + encode(message: _101.ContractCodeHistoryEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.ContractCodeHistoryEntry; + fromPartial(object: { + operation?: _101.ContractCodeHistoryOperationType; + codeId?: string | number | import("long").Long; + updated?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + msg?: Uint8Array; + }): _101.ContractCodeHistoryEntry; + }; + AbsoluteTxPosition: { + encode(message: _101.AbsoluteTxPosition, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.AbsoluteTxPosition; + fromPartial(object: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }): _101.AbsoluteTxPosition; + }; + Model: { + encode(message: _101.Model, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _101.Model; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + }): _101.Model; + }; + MsgStoreCode: { + encode(message: _100.MsgStoreCode, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgStoreCode; + fromPartial(object: { + sender?: string; + wasmByteCode?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + 
}): _100.MsgStoreCode; + }; + MsgStoreCodeResponse: { + encode(message: _100.MsgStoreCodeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgStoreCodeResponse; + fromPartial(object: { + codeId?: string | number | import("long").Long; + checksum?: Uint8Array; + }): _100.MsgStoreCodeResponse; + }; + MsgInstantiateContract: { + encode(message: _100.MsgInstantiateContract, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgInstantiateContract; + fromPartial(object: { + sender?: string; + admin?: string; + codeId?: string | number | import("long").Long; + label?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + }): _100.MsgInstantiateContract; + }; + MsgInstantiateContract2: { + encode(message: _100.MsgInstantiateContract2, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgInstantiateContract2; + fromPartial(object: { + sender?: string; + admin?: string; + codeId?: string | number | import("long").Long; + label?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + salt?: Uint8Array; + fixMsg?: boolean; + }): _100.MsgInstantiateContract2; + }; + MsgInstantiateContractResponse: { + encode(message: _100.MsgInstantiateContractResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgInstantiateContractResponse; + fromPartial(object: { + address?: string; + data?: Uint8Array; + }): _100.MsgInstantiateContractResponse; + }; + MsgInstantiateContract2Response: { + encode(message: _100.MsgInstantiateContract2Response, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgInstantiateContract2Response; + fromPartial(object: { + address?: string; + data?: Uint8Array; + }): _100.MsgInstantiateContract2Response; + }; + MsgExecuteContract: { + encode(message: _100.MsgExecuteContract, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgExecuteContract; + fromPartial(object: { + sender?: string; + contract?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + }): _100.MsgExecuteContract; + }; + MsgExecuteContractResponse: { + encode(message: _100.MsgExecuteContractResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgExecuteContractResponse; + fromPartial(object: { + data?: Uint8Array; + }): _100.MsgExecuteContractResponse; + }; + MsgMigrateContract: { + encode(message: _100.MsgMigrateContract, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgMigrateContract; + fromPartial(object: { + sender?: string; + contract?: string; + codeId?: string | number | import("long").Long; + msg?: Uint8Array; + }): _100.MsgMigrateContract; + }; + MsgMigrateContractResponse: { + encode(message: _100.MsgMigrateContractResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: 
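// Usage sketch (illustrative, not part of the generated output): a MsgExecuteContract built
// with the generated fromPartial. `msg` is the contract-specific execute payload as UTF-8
// JSON bytes; the transfer payload and addresses are placeholders.
import { toUtf8 } from "@cosmjs/encoding";
import { cosmwasm } from "./cosmwasm/bundle";

const { MsgExecuteContract } = cosmwasm.wasm.v1;

const execute = MsgExecuteContract.fromPartial({
  sender: "cosmos1sender...",
  contract: "cosmos1contract...",
  msg: toUtf8(JSON.stringify({ transfer: { recipient: "cosmos1recipient...", amount: "100" } })),
  funds: [], // attach coins here only if the call expects them
});

const anyMsg = {
  typeUrl: "/cosmwasm.wasm.v1.MsgExecuteContract",
  value: MsgExecuteContract.encode(execute).finish(),
};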
Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgMigrateContractResponse; + fromPartial(object: { + data?: Uint8Array; + }): _100.MsgMigrateContractResponse; + }; + MsgUpdateAdmin: { + encode(message: _100.MsgUpdateAdmin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgUpdateAdmin; + fromPartial(object: { + sender?: string; + newAdmin?: string; + contract?: string; + }): _100.MsgUpdateAdmin; + }; + MsgUpdateAdminResponse: { + encode(_: _100.MsgUpdateAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgUpdateAdminResponse; + fromPartial(_: {}): _100.MsgUpdateAdminResponse; + }; + MsgClearAdmin: { + encode(message: _100.MsgClearAdmin, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgClearAdmin; + fromPartial(object: { + sender?: string; + contract?: string; + }): _100.MsgClearAdmin; + }; + MsgClearAdminResponse: { + encode(_: _100.MsgClearAdminResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _100.MsgClearAdminResponse; + fromPartial(_: {}): _100.MsgClearAdminResponse; + }; + QueryContractInfoRequest: { + encode(message: _99.QueryContractInfoRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractInfoRequest; + fromPartial(object: { + address?: string; + }): _99.QueryContractInfoRequest; + }; + QueryContractInfoResponse: { + encode(message: _99.QueryContractInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractInfoResponse; + fromPartial(object: { + address?: string; + contractInfo?: { + codeId?: string | number | import("long").Long; + creator?: string; + admin?: string; + label?: string; + created?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + ibcPortId?: string; + extension?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + }): _99.QueryContractInfoResponse; + }; + QueryContractHistoryRequest: { + encode(message: _99.QueryContractHistoryRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractHistoryRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryContractHistoryRequest; + }; + QueryContractHistoryResponse: { + encode(message: _99.QueryContractHistoryResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractHistoryResponse; + fromPartial(object: { + entries?: { + operation?: _101.ContractCodeHistoryOperationType; + codeId?: string | number | import("long").Long; + updated?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + msg?: Uint8Array; + }[]; + 
pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryContractHistoryResponse; + }; + QueryContractsByCodeRequest: { + encode(message: _99.QueryContractsByCodeRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractsByCodeRequest; + fromPartial(object: { + codeId?: string | number | import("long").Long; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryContractsByCodeRequest; + }; + QueryContractsByCodeResponse: { + encode(message: _99.QueryContractsByCodeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractsByCodeResponse; + fromPartial(object: { + contracts?: string[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryContractsByCodeResponse; + }; + QueryAllContractStateRequest: { + encode(message: _99.QueryAllContractStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryAllContractStateRequest; + fromPartial(object: { + address?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryAllContractStateRequest; + }; + QueryAllContractStateResponse: { + encode(message: _99.QueryAllContractStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryAllContractStateResponse; + fromPartial(object: { + models?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryAllContractStateResponse; + }; + QueryRawContractStateRequest: { + encode(message: _99.QueryRawContractStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryRawContractStateRequest; + fromPartial(object: { + address?: string; + queryData?: Uint8Array; + }): _99.QueryRawContractStateRequest; + }; + QueryRawContractStateResponse: { + encode(message: _99.QueryRawContractStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryRawContractStateResponse; + fromPartial(object: { + data?: Uint8Array; + }): _99.QueryRawContractStateResponse; + }; + QuerySmartContractStateRequest: { + encode(message: _99.QuerySmartContractStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QuerySmartContractStateRequest; + fromPartial(object: { + address?: string; + queryData?: Uint8Array; + }): _99.QuerySmartContractStateRequest; + }; + QuerySmartContractStateResponse: { + encode(message: _99.QuerySmartContractStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_99.QuerySmartContractStateResponse; + fromPartial(object: { + data?: Uint8Array; + }): _99.QuerySmartContractStateResponse; + }; + QueryCodeRequest: { + encode(message: _99.QueryCodeRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryCodeRequest; + fromPartial(object: { + codeId?: string | number | import("long").Long; + }): _99.QueryCodeRequest; + }; + CodeInfoResponse: { + encode(message: _99.CodeInfoResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.CodeInfoResponse; + fromPartial(object: { + codeId?: string | number | import("long").Long; + creator?: string; + dataHash?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }): _99.CodeInfoResponse; + }; + QueryCodeResponse: { + encode(message: _99.QueryCodeResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryCodeResponse; + fromPartial(object: { + codeInfo?: { + codeId?: string | number | import("long").Long; + creator?: string; + dataHash?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }; + data?: Uint8Array; + }): _99.QueryCodeResponse; + }; + QueryCodesRequest: { + encode(message: _99.QueryCodesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryCodesRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryCodesRequest; + }; + QueryCodesResponse: { + encode(message: _99.QueryCodesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryCodesResponse; + fromPartial(object: { + codeInfos?: { + codeId?: string | number | import("long").Long; + creator?: string; + dataHash?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryCodesResponse; + }; + QueryPinnedCodesRequest: { + encode(message: _99.QueryPinnedCodesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryPinnedCodesRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryPinnedCodesRequest; + }; + QueryPinnedCodesResponse: { + encode(message: _99.QueryPinnedCodesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryPinnedCodesResponse; + fromPartial(object: { + codeIds?: (string | number | import("long").Long)[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryPinnedCodesResponse; + }; + 
QueryParamsRequest: { + encode(_: _99.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryParamsRequest; + fromPartial(_: {}): _99.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _99.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryParamsResponse; + fromPartial(object: { + params?: { + codeUploadAccess?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + instantiateDefaultPermission?: _101.AccessType; + }; + }): _99.QueryParamsResponse; + }; + QueryContractsByCreatorRequest: { + encode(message: _99.QueryContractsByCreatorRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractsByCreatorRequest; + fromPartial(object: { + creatorAddress?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _99.QueryContractsByCreatorRequest; + }; + QueryContractsByCreatorResponse: { + encode(message: _99.QueryContractsByCreatorResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _99.QueryContractsByCreatorResponse; + fromPartial(object: { + contractAddresses?: string[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _99.QueryContractsByCreatorResponse; + }; + StoreCodeProposal: { + encode(message: _98.StoreCodeProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.StoreCodeProposal; + fromPartial(object: { + title?: string; + description?: string; + runAs?: string; + wasmByteCode?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + unpinCode?: boolean; + source?: string; + builder?: string; + codeHash?: Uint8Array; + }): _98.StoreCodeProposal; + }; + InstantiateContractProposal: { + encode(message: _98.InstantiateContractProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.InstantiateContractProposal; + fromPartial(object: { + title?: string; + description?: string; + runAs?: string; + admin?: string; + codeId?: string | number | import("long").Long; + label?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + }): _98.InstantiateContractProposal; + }; + InstantiateContract2Proposal: { + encode(message: _98.InstantiateContract2Proposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.InstantiateContract2Proposal; + fromPartial(object: { + title?: string; + description?: string; + runAs?: string; + admin?: string; + codeId?: string | number | import("long").Long; + label?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + salt?: Uint8Array; + fixMsg?: boolean; + }): _98.InstantiateContract2Proposal; + }; + MigrateContractProposal: { + encode(message: 
_98.MigrateContractProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.MigrateContractProposal; + fromPartial(object: { + title?: string; + description?: string; + contract?: string; + codeId?: string | number | import("long").Long; + msg?: Uint8Array; + }): _98.MigrateContractProposal; + }; + SudoContractProposal: { + encode(message: _98.SudoContractProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.SudoContractProposal; + fromPartial(object: { + title?: string; + description?: string; + contract?: string; + msg?: Uint8Array; + }): _98.SudoContractProposal; + }; + ExecuteContractProposal: { + encode(message: _98.ExecuteContractProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.ExecuteContractProposal; + fromPartial(object: { + title?: string; + description?: string; + runAs?: string; + contract?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + }): _98.ExecuteContractProposal; + }; + UpdateAdminProposal: { + encode(message: _98.UpdateAdminProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.UpdateAdminProposal; + fromPartial(object: { + title?: string; + description?: string; + newAdmin?: string; + contract?: string; + }): _98.UpdateAdminProposal; + }; + ClearAdminProposal: { + encode(message: _98.ClearAdminProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.ClearAdminProposal; + fromPartial(object: { + title?: string; + description?: string; + contract?: string; + }): _98.ClearAdminProposal; + }; + PinCodesProposal: { + encode(message: _98.PinCodesProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.PinCodesProposal; + fromPartial(object: { + title?: string; + description?: string; + codeIds?: (string | number | import("long").Long)[]; + }): _98.PinCodesProposal; + }; + UnpinCodesProposal: { + encode(message: _98.UnpinCodesProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.UnpinCodesProposal; + fromPartial(object: { + title?: string; + description?: string; + codeIds?: (string | number | import("long").Long)[]; + }): _98.UnpinCodesProposal; + }; + AccessConfigUpdate: { + encode(message: _98.AccessConfigUpdate, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.AccessConfigUpdate; + fromPartial(object: { + codeId?: string | number | import("long").Long; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }): _98.AccessConfigUpdate; + }; + UpdateInstantiateConfigProposal: { + encode(message: _98.UpdateInstantiateConfigProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.UpdateInstantiateConfigProposal; + fromPartial(object: { + title?: string; + description?: string; + 
accessConfigUpdates?: { + codeId?: string | number | import("long").Long; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }[]; + }): _98.UpdateInstantiateConfigProposal; + }; + StoreAndInstantiateContractProposal: { + encode(message: _98.StoreAndInstantiateContractProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _98.StoreAndInstantiateContractProposal; + fromPartial(object: { + title?: string; + description?: string; + runAs?: string; + wasmByteCode?: Uint8Array; + instantiatePermission?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + unpinCode?: boolean; + admin?: string; + label?: string; + msg?: Uint8Array; + funds?: { + denom?: string; + amount?: string; + }[]; + source?: string; + builder?: string; + codeHash?: Uint8Array; + }): _98.StoreAndInstantiateContractProposal; + }; + MsgIBCSend: { + encode(message: _97.MsgIBCSend, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _97.MsgIBCSend; + fromPartial(object: { + channel?: string; + timeoutHeight?: string | number | import("long").Long; + timeoutTimestamp?: string | number | import("long").Long; + data?: Uint8Array; + }): _97.MsgIBCSend; + }; + MsgIBCCloseChannel: { + encode(message: _97.MsgIBCCloseChannel, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _97.MsgIBCCloseChannel; + fromPartial(object: { + channel?: string; + }): _97.MsgIBCCloseChannel; + }; + GenesisState: { + encode(message: _96.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _96.GenesisState; + fromPartial(object: { + params?: { + codeUploadAccess?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + instantiateDefaultPermission?: _101.AccessType; + }; + codes?: { + codeId?: string | number | import("long").Long; + codeInfo?: { + codeHash?: Uint8Array; + creator?: string; + instantiateConfig?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }; + codeBytes?: Uint8Array; + pinned?: boolean; + }[]; + contracts?: { + contractAddress?: string; + contractInfo?: { + codeId?: string | number | import("long").Long; + creator?: string; + admin?: string; + label?: string; + created?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + ibcPortId?: string; + extension?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + contractState?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + contractCodeHistory?: { + operation?: _101.ContractCodeHistoryOperationType; + codeId?: string | number | import("long").Long; + updated?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + msg?: Uint8Array; + }[]; + }[]; + sequences?: { + idKey?: Uint8Array; + value?: string | number | import("long").Long; + }[]; + }): _96.GenesisState; + }; + Code: { + encode(message: _96.Code, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _96.Code; + fromPartial(object: { + codeId?: string | number | import("long").Long; 
+ codeInfo?: { + codeHash?: Uint8Array; + creator?: string; + instantiateConfig?: { + permission?: _101.AccessType; + address?: string; + addresses?: string[]; + }; + }; + codeBytes?: Uint8Array; + pinned?: boolean; + }): _96.Code; + }; + Contract: { + encode(message: _96.Contract, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _96.Contract; + fromPartial(object: { + contractAddress?: string; + contractInfo?: { + codeId?: string | number | import("long").Long; + creator?: string; + admin?: string; + label?: string; + created?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + ibcPortId?: string; + extension?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + contractState?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + contractCodeHistory?: { + operation?: _101.ContractCodeHistoryOperationType; + codeId?: string | number | import("long").Long; + updated?: { + blockHeight?: string | number | import("long").Long; + txIndex?: string | number | import("long").Long; + }; + msg?: Uint8Array; + }[]; + }): _96.Contract; + }; + Sequence: { + encode(message: _96.Sequence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _96.Sequence; + fromPartial(object: { + idKey?: Uint8Array; + value?: string | number | import("long").Long; + }): _96.Sequence; + }; + ContractExecutionAuthorization: { + encode(message: _95.ContractExecutionAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.ContractExecutionAuthorization; + fromPartial(object: { + grants?: { + contract?: string; + limit?: { + typeUrl?: string; + value?: Uint8Array; + }; + filter?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _95.ContractExecutionAuthorization; + }; + ContractMigrationAuthorization: { + encode(message: _95.ContractMigrationAuthorization, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.ContractMigrationAuthorization; + fromPartial(object: { + grants?: { + contract?: string; + limit?: { + typeUrl?: string; + value?: Uint8Array; + }; + filter?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _95.ContractMigrationAuthorization; + }; + ContractGrant: { + encode(message: _95.ContractGrant, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.ContractGrant; + fromPartial(object: { + contract?: string; + limit?: { + typeUrl?: string; + value?: Uint8Array; + }; + filter?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _95.ContractGrant; + }; + MaxCallsLimit: { + encode(message: _95.MaxCallsLimit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.MaxCallsLimit; + fromPartial(object: { + remaining?: string | number | import("long").Long; + }): _95.MaxCallsLimit; + }; + MaxFundsLimit: { + encode(message: _95.MaxFundsLimit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.MaxFundsLimit; + fromPartial(object: { + amounts?: { + denom?: string; + 
amount?: string; + }[]; + }): _95.MaxFundsLimit; + }; + CombinedLimit: { + encode(message: _95.CombinedLimit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.CombinedLimit; + fromPartial(object: { + callsRemaining?: string | number | import("long").Long; + amounts?: { + denom?: string; + amount?: string; + }[]; + }): _95.CombinedLimit; + }; + AllowAllMessagesFilter: { + encode(_: _95.AllowAllMessagesFilter, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.AllowAllMessagesFilter; + fromPartial(_: {}): _95.AllowAllMessagesFilter; + }; + AcceptedMessageKeysFilter: { + encode(message: _95.AcceptedMessageKeysFilter, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.AcceptedMessageKeysFilter; + fromPartial(object: { + keys?: string[]; + }): _95.AcceptedMessageKeysFilter; + }; + AcceptedMessagesFilter: { + encode(message: _95.AcceptedMessagesFilter, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _95.AcceptedMessagesFilter; + fromPartial(object: { + messages?: Uint8Array[]; + }): _95.AcceptedMessagesFilter; + }; + }; + } + const ClientFactory: { + createRPCMsgClient: ({ rpc }: { + rpc: import("../helpers").Rpc; + }) => Promise<{ + cosmos: { + authz: { + v1beta1: import("../cosmos/authz/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + crisis: { + v1beta1: import("../cosmos/crisis/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + gov: { + v1: import("../cosmos/gov/v1/tx.rpc.msg").MsgClientImpl; + v1beta1: import("../cosmos/gov/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + group: { + v1: import("../cosmos/group/v1/tx.rpc.msg").MsgClientImpl; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + vesting: { + v1beta1: import("../cosmos/vesting/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + }; + cosmwasm: { + wasm: { + v1: _196.MsgClientImpl; + }; + }; + }>; + createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | import("@cosmjs/tendermint-rpc").HttpEndpoint; + }) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: import("../cosmos/app/v1alpha1/query").QueryConfigRequest): Promise; + }; + }; + auth: { + v1beta1: { + accounts(request?: import("../cosmos/auth/v1beta1/query").QueryAccountsRequest): Promise; + account(request: import("../cosmos/auth/v1beta1/query").QueryAccountRequest): Promise; + params(request?: import("../cosmos/auth/v1beta1/query").QueryParamsRequest): Promise; + moduleAccounts(request?: import("../cosmos/auth/v1beta1/query").QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: 
import("../cosmos/auth/v1beta1/query").Bech32PrefixRequest): Promise; + addressBytesToString(request: import("../cosmos/auth/v1beta1/query").AddressBytesToStringRequest): Promise; + addressStringToBytes(request: import("../cosmos/auth/v1beta1/query").AddressStringToBytesRequest): Promise; + }; + }; + authz: { + v1beta1: { + grants(request: import("../cosmos/authz/v1beta1/query").QueryGrantsRequest): Promise; + granterGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranterGrantsRequest): Promise; + granteeGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranteeGrantsRequest): Promise; + }; + }; + bank: { + v1beta1: { + balance(request: import("../cosmos/bank/v1beta1/query").QueryBalanceRequest): Promise; + allBalances(request: import("../cosmos/bank/v1beta1/query").QueryAllBalancesRequest): Promise; + spendableBalances(request: import("../cosmos/bank/v1beta1/query").QuerySpendableBalancesRequest): Promise; + totalSupply(request?: import("../cosmos/bank/v1beta1/query").QueryTotalSupplyRequest): Promise; + supplyOf(request: import("../cosmos/bank/v1beta1/query").QuerySupplyOfRequest): Promise; + params(request?: import("../cosmos/bank/v1beta1/query").QueryParamsRequest): Promise; + denomMetadata(request: import("../cosmos/bank/v1beta1/query").QueryDenomMetadataRequest): Promise; + denomsMetadata(request?: import("../cosmos/bank/v1beta1/query").QueryDenomsMetadataRequest): Promise; + denomOwners(request: import("../cosmos/bank/v1beta1/query").QueryDenomOwnersRequest): Promise; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: import("../cosmos/base/tendermint/v1beta1/query").GetNodeInfoRequest): Promise; + getSyncing(request?: import("../cosmos/base/tendermint/v1beta1/query").GetSyncingRequest): Promise; + getLatestBlock(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestBlockRequest): Promise; + getBlockByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetValidatorSetByHeightRequest): Promise; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: import("../cosmos/distribution/v1beta1/query").QueryParamsRequest): Promise; + validatorOutstandingRewards(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorSlashesRequest): Promise; + delegationRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: import("../cosmos/distribution/v1beta1/query").QueryCommunityPoolRequest): Promise; + }; + }; + evidence: { + v1beta1: { + evidence(request: import("../cosmos/evidence/v1beta1/query").QueryEvidenceRequest): Promise; + allEvidence(request?: 
import("../cosmos/evidence/v1beta1/query").QueryAllEvidenceRequest): Promise; + }; + }; + feegrant: { + v1beta1: { + allowance(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowanceRequest): Promise; + allowances(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesRequest): Promise; + allowancesByGranter(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesByGranterRequest): Promise; + }; + }; + gov: { + v1: { + proposal(request: import("../cosmos/gov/v1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1/query").QueryTallyResultRequest): Promise; + }; + v1beta1: { + proposal(request: import("../cosmos/gov/v1beta1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1beta1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1beta1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1beta1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1beta1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1beta1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1beta1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1beta1/query").QueryTallyResultRequest): Promise; + }; + }; + group: { + v1: { + groupInfo(request: import("../cosmos/group/v1/query").QueryGroupInfoRequest): Promise; + groupPolicyInfo(request: import("../cosmos/group/v1/query").QueryGroupPolicyInfoRequest): Promise; + groupMembers(request: import("../cosmos/group/v1/query").QueryGroupMembersRequest): Promise; + groupsByAdmin(request: import("../cosmos/group/v1/query").QueryGroupsByAdminRequest): Promise; + groupPoliciesByGroup(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByGroupRequest): Promise; + groupPoliciesByAdmin(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByAdminRequest): Promise; + proposal(request: import("../cosmos/group/v1/query").QueryProposalRequest): Promise; + proposalsByGroupPolicy(request: import("../cosmos/group/v1/query").QueryProposalsByGroupPolicyRequest): Promise; + voteByProposalVoter(request: import("../cosmos/group/v1/query").QueryVoteByProposalVoterRequest): Promise; + votesByProposal(request: import("../cosmos/group/v1/query").QueryVotesByProposalRequest): Promise; + votesByVoter(request: import("../cosmos/group/v1/query").QueryVotesByVoterRequest): Promise; + groupsByMember(request: import("../cosmos/group/v1/query").QueryGroupsByMemberRequest): Promise; + tallyResult(request: import("../cosmos/group/v1/query").QueryTallyResultRequest): Promise; + }; + }; + mint: { + v1beta1: { + params(request?: import("../cosmos/mint/v1beta1/query").QueryParamsRequest): Promise; + inflation(request?: import("../cosmos/mint/v1beta1/query").QueryInflationRequest): Promise; + annualProvisions(request?: import("../cosmos/mint/v1beta1/query").QueryAnnualProvisionsRequest): Promise; + }; + }; + nft: { + v1beta1: { + balance(request: 
import("../cosmos/nft/v1beta1/query").QueryBalanceRequest): Promise; + owner(request: import("../cosmos/nft/v1beta1/query").QueryOwnerRequest): Promise; + supply(request: import("../cosmos/nft/v1beta1/query").QuerySupplyRequest): Promise; + nFTs(request: import("../cosmos/nft/v1beta1/query").QueryNFTsRequest): Promise; + nFT(request: import("../cosmos/nft/v1beta1/query").QueryNFTRequest): Promise; + class(request: import("../cosmos/nft/v1beta1/query").QueryClassRequest): Promise; + classes(request?: import("../cosmos/nft/v1beta1/query").QueryClassesRequest): Promise; + }; + }; + params: { + v1beta1: { + params(request: import("../cosmos/params/v1beta1/query").QueryParamsRequest): Promise; + subspaces(request?: import("../cosmos/params/v1beta1/query").QuerySubspacesRequest): Promise; + }; + }; + slashing: { + v1beta1: { + params(request?: import("../cosmos/slashing/v1beta1/query").QueryParamsRequest): Promise; + signingInfo(request: import("../cosmos/slashing/v1beta1/query").QuerySigningInfoRequest): Promise; + signingInfos(request?: import("../cosmos/slashing/v1beta1/query").QuerySigningInfosRequest): Promise; + }; + }; + staking: { + v1beta1: { + validators(request: import("../cosmos/staking/v1beta1/query").QueryValidatorsRequest): Promise; + validator(request: import("../cosmos/staking/v1beta1/query").QueryValidatorRequest): Promise; + validatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: import("../cosmos/staking/v1beta1/query").QueryDelegationRequest): Promise; + unbondingDelegation(request: import("../cosmos/staking/v1beta1/query").QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: import("../cosmos/staking/v1beta1/query").QueryRedelegationsRequest): Promise; + delegatorValidators(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: import("../cosmos/staking/v1beta1/query").QueryHistoricalInfoRequest): Promise; + pool(request?: import("../cosmos/staking/v1beta1/query").QueryPoolRequest): Promise; + params(request?: import("../cosmos/staking/v1beta1/query").QueryParamsRequest): Promise; + }; + }; + tx: { + v1beta1: { + simulate(request: import("../cosmos/tx/v1beta1/service").SimulateRequest): Promise; + getTx(request: import("../cosmos/tx/v1beta1/service").GetTxRequest): Promise; + broadcastTx(request: import("../cosmos/tx/v1beta1/service").BroadcastTxRequest): Promise; + getTxsEvent(request: import("../cosmos/tx/v1beta1/service").GetTxsEventRequest): Promise; + getBlockWithTxs(request: import("../cosmos/tx/v1beta1/service").GetBlockWithTxsRequest): Promise; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: import("../cosmos/upgrade/v1beta1/query").QueryCurrentPlanRequest): Promise; + appliedPlan(request: import("../cosmos/upgrade/v1beta1/query").QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: import("../cosmos/upgrade/v1beta1/query").QueryUpgradedConsensusStateRequest): Promise; + 
moduleVersions(request: import("../cosmos/upgrade/v1beta1/query").QueryModuleVersionsRequest): Promise; + authority(request?: import("../cosmos/upgrade/v1beta1/query").QueryAuthorityRequest): Promise; + }; + }; + }; + cosmwasm: { + wasm: { + v1: { + contractInfo(request: _99.QueryContractInfoRequest): Promise<_99.QueryContractInfoResponse>; + contractHistory(request: _99.QueryContractHistoryRequest): Promise<_99.QueryContractHistoryResponse>; + contractsByCode(request: _99.QueryContractsByCodeRequest): Promise<_99.QueryContractsByCodeResponse>; + allContractState(request: _99.QueryAllContractStateRequest): Promise<_99.QueryAllContractStateResponse>; + rawContractState(request: _99.QueryRawContractStateRequest): Promise<_99.QueryRawContractStateResponse>; + smartContractState(request: _99.QuerySmartContractStateRequest): Promise<_99.QuerySmartContractStateResponse>; + code(request: _99.QueryCodeRequest): Promise<_99.QueryCodeResponse>; + codes(request?: _99.QueryCodesRequest): Promise<_99.QueryCodesResponse>; + pinnedCodes(request?: _99.QueryPinnedCodesRequest): Promise<_99.QueryPinnedCodesResponse>; + params(request?: _99.QueryParamsRequest): Promise<_99.QueryParamsResponse>; + contractsByCreator(request: _99.QueryContractsByCreatorRequest): Promise<_99.QueryContractsByCreatorResponse>; + }; + }; + }; + }>; + createLCDClient: ({ restEndpoint }: { + restEndpoint: string; + }) => Promise<{ + cosmos: { + auth: { + v1beta1: import("../cosmos/auth/v1beta1/query.lcd").LCDQueryClient; + }; + authz: { + v1beta1: import("../cosmos/authz/v1beta1/query.lcd").LCDQueryClient; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/query.lcd").LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: import("../cosmos/base/tendermint/v1beta1/query.lcd").LCDQueryClient; + }; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/query.lcd").LCDQueryClient; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/query.lcd").LCDQueryClient; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/query.lcd").LCDQueryClient; + }; + gov: { + v1: import("../cosmos/gov/v1/query.lcd").LCDQueryClient; + v1beta1: import("../cosmos/gov/v1beta1/query.lcd").LCDQueryClient; + }; + group: { + v1: import("../cosmos/group/v1/query.lcd").LCDQueryClient; + }; + mint: { + v1beta1: import("../cosmos/mint/v1beta1/query.lcd").LCDQueryClient; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/query.lcd").LCDQueryClient; + }; + params: { + v1beta1: import("../cosmos/params/v1beta1/query.lcd").LCDQueryClient; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/query.lcd").LCDQueryClient; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/query.lcd").LCDQueryClient; + }; + tx: { + v1beta1: import("../cosmos/tx/v1beta1/service.lcd").LCDQueryClient; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/query.lcd").LCDQueryClient; + }; + }; + cosmwasm: { + wasm: { + v1: _194.LCDQueryClient; + }; + }; + }>; + }; +} diff --git a/packages/codegen/dist/cosmwasm/lcd.d.ts b/packages/codegen/dist/cosmwasm/lcd.d.ts new file mode 100644 index 00000000..be718295 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/lcd.d.ts @@ -0,0 +1,62 @@ +export declare const createLCDClient: ({ restEndpoint }: { + restEndpoint: string; +}) => Promise<{ + cosmos: { + auth: { + v1beta1: import("../cosmos/auth/v1beta1/query.lcd").LCDQueryClient; + }; + authz: { + v1beta1: import("../cosmos/authz/v1beta1/query.lcd").LCDQueryClient; + }; + bank: { + v1beta1: 
import("../cosmos/bank/v1beta1/query.lcd").LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: import("../cosmos/base/tendermint/v1beta1/query.lcd").LCDQueryClient; + }; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/query.lcd").LCDQueryClient; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/query.lcd").LCDQueryClient; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/query.lcd").LCDQueryClient; + }; + gov: { + v1: import("../cosmos/gov/v1/query.lcd").LCDQueryClient; + v1beta1: import("../cosmos/gov/v1beta1/query.lcd").LCDQueryClient; + }; + group: { + v1: import("../cosmos/group/v1/query.lcd").LCDQueryClient; + }; + mint: { + v1beta1: import("../cosmos/mint/v1beta1/query.lcd").LCDQueryClient; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/query.lcd").LCDQueryClient; + }; + params: { + v1beta1: import("../cosmos/params/v1beta1/query.lcd").LCDQueryClient; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/query.lcd").LCDQueryClient; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/query.lcd").LCDQueryClient; + }; + tx: { + v1beta1: import("../cosmos/tx/v1beta1/service.lcd").LCDQueryClient; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/query.lcd").LCDQueryClient; + }; + }; + cosmwasm: { + wasm: { + v1: import("./wasm/v1/query.lcd").LCDQueryClient; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmwasm/rpc.query.d.ts b/packages/codegen/dist/cosmwasm/rpc.query.d.ts new file mode 100644 index 00000000..0f25ccba --- /dev/null +++ b/packages/codegen/dist/cosmwasm/rpc.query.d.ts @@ -0,0 +1,204 @@ +import { HttpEndpoint } from "@cosmjs/tendermint-rpc"; +export declare const createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | HttpEndpoint; +}) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: import("../cosmos/app/v1alpha1/query").QueryConfigRequest): Promise; + }; + }; + auth: { + v1beta1: { + accounts(request?: import("../cosmos/auth/v1beta1/query").QueryAccountsRequest): Promise; + account(request: import("../cosmos/auth/v1beta1/query").QueryAccountRequest): Promise; + params(request?: import("../cosmos/auth/v1beta1/query").QueryParamsRequest): Promise; + moduleAccounts(request?: import("../cosmos/auth/v1beta1/query").QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: import("../cosmos/auth/v1beta1/query").Bech32PrefixRequest): Promise; + addressBytesToString(request: import("../cosmos/auth/v1beta1/query").AddressBytesToStringRequest): Promise; + addressStringToBytes(request: import("../cosmos/auth/v1beta1/query").AddressStringToBytesRequest): Promise; + }; + }; + authz: { + v1beta1: { + grants(request: import("../cosmos/authz/v1beta1/query").QueryGrantsRequest): Promise; + granterGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranterGrantsRequest): Promise; + granteeGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranteeGrantsRequest): Promise; + }; + }; + bank: { + v1beta1: { + balance(request: import("../cosmos/bank/v1beta1/query").QueryBalanceRequest): Promise; + allBalances(request: import("../cosmos/bank/v1beta1/query").QueryAllBalancesRequest): Promise; + spendableBalances(request: import("../cosmos/bank/v1beta1/query").QuerySpendableBalancesRequest): Promise; + totalSupply(request?: import("../cosmos/bank/v1beta1/query").QueryTotalSupplyRequest): Promise; + supplyOf(request: import("../cosmos/bank/v1beta1/query").QuerySupplyOfRequest): Promise; + params(request?: 
import("../cosmos/bank/v1beta1/query").QueryParamsRequest): Promise; + denomMetadata(request: import("../cosmos/bank/v1beta1/query").QueryDenomMetadataRequest): Promise; + denomsMetadata(request?: import("../cosmos/bank/v1beta1/query").QueryDenomsMetadataRequest): Promise; + denomOwners(request: import("../cosmos/bank/v1beta1/query").QueryDenomOwnersRequest): Promise; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: import("../cosmos/base/tendermint/v1beta1/query").GetNodeInfoRequest): Promise; + getSyncing(request?: import("../cosmos/base/tendermint/v1beta1/query").GetSyncingRequest): Promise; + getLatestBlock(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestBlockRequest): Promise; + getBlockByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetValidatorSetByHeightRequest): Promise; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: import("../cosmos/distribution/v1beta1/query").QueryParamsRequest): Promise; + validatorOutstandingRewards(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorSlashesRequest): Promise; + delegationRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: import("../cosmos/distribution/v1beta1/query").QueryCommunityPoolRequest): Promise; + }; + }; + evidence: { + v1beta1: { + evidence(request: import("../cosmos/evidence/v1beta1/query").QueryEvidenceRequest): Promise; + allEvidence(request?: import("../cosmos/evidence/v1beta1/query").QueryAllEvidenceRequest): Promise; + }; + }; + feegrant: { + v1beta1: { + allowance(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowanceRequest): Promise; + allowances(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesRequest): Promise; + allowancesByGranter(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesByGranterRequest): Promise; + }; + }; + gov: { + v1: { + proposal(request: import("../cosmos/gov/v1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1/query").QueryTallyResultRequest): Promise; + }; + v1beta1: { + proposal(request: 
import("../cosmos/gov/v1beta1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1beta1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1beta1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1beta1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1beta1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1beta1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1beta1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1beta1/query").QueryTallyResultRequest): Promise; + }; + }; + group: { + v1: { + groupInfo(request: import("../cosmos/group/v1/query").QueryGroupInfoRequest): Promise; + groupPolicyInfo(request: import("../cosmos/group/v1/query").QueryGroupPolicyInfoRequest): Promise; + groupMembers(request: import("../cosmos/group/v1/query").QueryGroupMembersRequest): Promise; + groupsByAdmin(request: import("../cosmos/group/v1/query").QueryGroupsByAdminRequest): Promise; + groupPoliciesByGroup(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByGroupRequest): Promise; + groupPoliciesByAdmin(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByAdminRequest): Promise; + proposal(request: import("../cosmos/group/v1/query").QueryProposalRequest): Promise; + proposalsByGroupPolicy(request: import("../cosmos/group/v1/query").QueryProposalsByGroupPolicyRequest): Promise; + voteByProposalVoter(request: import("../cosmos/group/v1/query").QueryVoteByProposalVoterRequest): Promise; + votesByProposal(request: import("../cosmos/group/v1/query").QueryVotesByProposalRequest): Promise; + votesByVoter(request: import("../cosmos/group/v1/query").QueryVotesByVoterRequest): Promise; + groupsByMember(request: import("../cosmos/group/v1/query").QueryGroupsByMemberRequest): Promise; + tallyResult(request: import("../cosmos/group/v1/query").QueryTallyResultRequest): Promise; + }; + }; + mint: { + v1beta1: { + params(request?: import("../cosmos/mint/v1beta1/query").QueryParamsRequest): Promise; + inflation(request?: import("../cosmos/mint/v1beta1/query").QueryInflationRequest): Promise; + annualProvisions(request?: import("../cosmos/mint/v1beta1/query").QueryAnnualProvisionsRequest): Promise; + }; + }; + nft: { + v1beta1: { + balance(request: import("../cosmos/nft/v1beta1/query").QueryBalanceRequest): Promise; + owner(request: import("../cosmos/nft/v1beta1/query").QueryOwnerRequest): Promise; + supply(request: import("../cosmos/nft/v1beta1/query").QuerySupplyRequest): Promise; + nFTs(request: import("../cosmos/nft/v1beta1/query").QueryNFTsRequest): Promise; + nFT(request: import("../cosmos/nft/v1beta1/query").QueryNFTRequest): Promise; + class(request: import("../cosmos/nft/v1beta1/query").QueryClassRequest): Promise; + classes(request?: import("../cosmos/nft/v1beta1/query").QueryClassesRequest): Promise; + }; + }; + params: { + v1beta1: { + params(request: import("../cosmos/params/v1beta1/query").QueryParamsRequest): Promise; + subspaces(request?: import("../cosmos/params/v1beta1/query").QuerySubspacesRequest): Promise; + }; + }; + slashing: { + v1beta1: { + params(request?: import("../cosmos/slashing/v1beta1/query").QueryParamsRequest): Promise; + signingInfo(request: import("../cosmos/slashing/v1beta1/query").QuerySigningInfoRequest): Promise; + signingInfos(request?: import("../cosmos/slashing/v1beta1/query").QuerySigningInfosRequest): Promise; + }; + }; + staking: { + 
v1beta1: { + validators(request: import("../cosmos/staking/v1beta1/query").QueryValidatorsRequest): Promise; + validator(request: import("../cosmos/staking/v1beta1/query").QueryValidatorRequest): Promise; + validatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: import("../cosmos/staking/v1beta1/query").QueryDelegationRequest): Promise; + unbondingDelegation(request: import("../cosmos/staking/v1beta1/query").QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: import("../cosmos/staking/v1beta1/query").QueryRedelegationsRequest): Promise; + delegatorValidators(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: import("../cosmos/staking/v1beta1/query").QueryHistoricalInfoRequest): Promise; + pool(request?: import("../cosmos/staking/v1beta1/query").QueryPoolRequest): Promise; + params(request?: import("../cosmos/staking/v1beta1/query").QueryParamsRequest): Promise; + }; + }; + tx: { + v1beta1: { + simulate(request: import("../cosmos/tx/v1beta1/service").SimulateRequest): Promise; + getTx(request: import("../cosmos/tx/v1beta1/service").GetTxRequest): Promise; + broadcastTx(request: import("../cosmos/tx/v1beta1/service").BroadcastTxRequest): Promise; + getTxsEvent(request: import("../cosmos/tx/v1beta1/service").GetTxsEventRequest): Promise; + getBlockWithTxs(request: import("../cosmos/tx/v1beta1/service").GetBlockWithTxsRequest): Promise; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: import("../cosmos/upgrade/v1beta1/query").QueryCurrentPlanRequest): Promise; + appliedPlan(request: import("../cosmos/upgrade/v1beta1/query").QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: import("../cosmos/upgrade/v1beta1/query").QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: import("../cosmos/upgrade/v1beta1/query").QueryModuleVersionsRequest): Promise; + authority(request?: import("../cosmos/upgrade/v1beta1/query").QueryAuthorityRequest): Promise; + }; + }; + }; + cosmwasm: { + wasm: { + v1: { + contractInfo(request: import("./wasm/v1/query").QueryContractInfoRequest): Promise; + contractHistory(request: import("./wasm/v1/query").QueryContractHistoryRequest): Promise; + contractsByCode(request: import("./wasm/v1/query").QueryContractsByCodeRequest): Promise; + allContractState(request: import("./wasm/v1/query").QueryAllContractStateRequest): Promise; + rawContractState(request: import("./wasm/v1/query").QueryRawContractStateRequest): Promise; + smartContractState(request: import("./wasm/v1/query").QuerySmartContractStateRequest): Promise; + code(request: import("./wasm/v1/query").QueryCodeRequest): Promise; + codes(request?: import("./wasm/v1/query").QueryCodesRequest): Promise; + pinnedCodes(request?: import("./wasm/v1/query").QueryPinnedCodesRequest): Promise; + params(request?: import("./wasm/v1/query").QueryParamsRequest): Promise; + contractsByCreator(request: 
import("./wasm/v1/query").QueryContractsByCreatorRequest): Promise; + }; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmwasm/rpc.tx.d.ts b/packages/codegen/dist/cosmwasm/rpc.tx.d.ts new file mode 100644 index 00000000..5c33c61a --- /dev/null +++ b/packages/codegen/dist/cosmwasm/rpc.tx.d.ts @@ -0,0 +1,52 @@ +import { Rpc } from "../helpers"; +export declare const createRPCMsgClient: ({ rpc }: { + rpc: Rpc; +}) => Promise<{ + cosmos: { + authz: { + v1beta1: import("../cosmos/authz/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + crisis: { + v1beta1: import("../cosmos/crisis/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + gov: { + v1: import("../cosmos/gov/v1/tx.rpc.msg").MsgClientImpl; + v1beta1: import("../cosmos/gov/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + group: { + v1: import("../cosmos/group/v1/tx.rpc.msg").MsgClientImpl; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + vesting: { + v1beta1: import("../cosmos/vesting/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + }; + cosmwasm: { + wasm: { + v1: import("./wasm/v1/tx.rpc.msg").MsgClientImpl; + }; + }; +}>; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/authz.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/authz.d.ts new file mode 100644 index 00000000..04931a1c --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/authz.d.ts @@ -0,0 +1,206 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * ContractExecutionAuthorization defines authorization for wasm execute. + * Since: wasmd 0.30 + */ +export interface ContractExecutionAuthorization { + /** Grants for contract executions */ + grants: ContractGrant[]; +} +/** + * ContractExecutionAuthorization defines authorization for wasm execute. + * Since: wasmd 0.30 + */ +export interface ContractExecutionAuthorizationSDKType { + grants: ContractGrantSDKType[]; +} +/** + * ContractMigrationAuthorization defines authorization for wasm contract + * migration. Since: wasmd 0.30 + */ +export interface ContractMigrationAuthorization { + /** Grants for contract migrations */ + grants: ContractGrant[]; +} +/** + * ContractMigrationAuthorization defines authorization for wasm contract + * migration. Since: wasmd 0.30 + */ +export interface ContractMigrationAuthorizationSDKType { + grants: ContractGrantSDKType[]; +} +/** + * ContractGrant a granted permission for a single contract + * Since: wasmd 0.30 + */ +export interface ContractGrant { + /** Contract is the bech32 address of the smart contract */ + contract: string; + /** + * Limit defines execution limits that are enforced and updated when the grant + * is applied. When the limit lapsed the grant is removed. 
+ */ + limit?: Any; + /** + * Filter define more fine-grained control on the message payload passed + * to the contract in the operation. When no filter applies on execution, the + * operation is prohibited. + */ + filter?: Any; +} +/** + * ContractGrant a granted permission for a single contract + * Since: wasmd 0.30 + */ +export interface ContractGrantSDKType { + contract: string; + limit?: AnySDKType; + filter?: AnySDKType; +} +/** + * MaxCallsLimit limited number of calls to the contract. No funds transferable. + * Since: wasmd 0.30 + */ +export interface MaxCallsLimit { + /** Remaining number that is decremented on each execution */ + remaining: Long; +} +/** + * MaxCallsLimit limited number of calls to the contract. No funds transferable. + * Since: wasmd 0.30 + */ +export interface MaxCallsLimitSDKType { + remaining: Long; +} +/** + * MaxFundsLimit defines the maximal amounts that can be sent to the contract. + * Since: wasmd 0.30 + */ +export interface MaxFundsLimit { + /** Amounts is the maximal amount of tokens transferable to the contract. */ + amounts: Coin[]; +} +/** + * MaxFundsLimit defines the maximal amounts that can be sent to the contract. + * Since: wasmd 0.30 + */ +export interface MaxFundsLimitSDKType { + amounts: CoinSDKType[]; +} +/** + * CombinedLimit defines the maximal amounts that can be sent to a contract and + * the maximal number of calls executable. Both need to remain >0 to be valid. + * Since: wasmd 0.30 + */ +export interface CombinedLimit { + /** Remaining number that is decremented on each execution */ + callsRemaining: Long; + /** Amounts is the maximal amount of tokens transferable to the contract. */ + amounts: Coin[]; +} +/** + * CombinedLimit defines the maximal amounts that can be sent to a contract and + * the maximal number of calls executable. Both need to remain >0 to be valid. + * Since: wasmd 0.30 + */ +export interface CombinedLimitSDKType { + calls_remaining: Long; + amounts: CoinSDKType[]; +} +/** + * AllowAllMessagesFilter is a wildcard to allow any type of contract payload + * message. + * Since: wasmd 0.30 + */ +export interface AllowAllMessagesFilter { +} +/** + * AllowAllMessagesFilter is a wildcard to allow any type of contract payload + * message. + * Since: wasmd 0.30 + */ +export interface AllowAllMessagesFilterSDKType { +} +/** + * AcceptedMessageKeysFilter accept only the specific contract message keys in + * the json object to be executed. + * Since: wasmd 0.30 + */ +export interface AcceptedMessageKeysFilter { + /** Messages is the list of unique keys */ + keys: string[]; +} +/** + * AcceptedMessageKeysFilter accept only the specific contract message keys in + * the json object to be executed. + * Since: wasmd 0.30 + */ +export interface AcceptedMessageKeysFilterSDKType { + keys: string[]; +} +/** + * AcceptedMessagesFilter accept only the specific raw contract messages to be + * executed. + * Since: wasmd 0.30 + */ +export interface AcceptedMessagesFilter { + /** Messages is the list of raw contract messages */ + messages: Uint8Array[]; +} +/** + * AcceptedMessagesFilter accept only the specific raw contract messages to be + * executed. 
+ * Since: wasmd 0.30 + */ +export interface AcceptedMessagesFilterSDKType { + messages: Uint8Array[]; +} +export declare const ContractExecutionAuthorization: { + encode(message: ContractExecutionAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContractExecutionAuthorization; + fromPartial(object: DeepPartial): ContractExecutionAuthorization; +}; +export declare const ContractMigrationAuthorization: { + encode(message: ContractMigrationAuthorization, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContractMigrationAuthorization; + fromPartial(object: DeepPartial): ContractMigrationAuthorization; +}; +export declare const ContractGrant: { + encode(message: ContractGrant, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContractGrant; + fromPartial(object: DeepPartial): ContractGrant; +}; +export declare const MaxCallsLimit: { + encode(message: MaxCallsLimit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MaxCallsLimit; + fromPartial(object: DeepPartial): MaxCallsLimit; +}; +export declare const MaxFundsLimit: { + encode(message: MaxFundsLimit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MaxFundsLimit; + fromPartial(object: DeepPartial): MaxFundsLimit; +}; +export declare const CombinedLimit: { + encode(message: CombinedLimit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CombinedLimit; + fromPartial(object: DeepPartial): CombinedLimit; +}; +export declare const AllowAllMessagesFilter: { + encode(_: AllowAllMessagesFilter, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AllowAllMessagesFilter; + fromPartial(_: DeepPartial): AllowAllMessagesFilter; +}; +export declare const AcceptedMessageKeysFilter: { + encode(message: AcceptedMessageKeysFilter, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AcceptedMessageKeysFilter; + fromPartial(object: DeepPartial): AcceptedMessageKeysFilter; +}; +export declare const AcceptedMessagesFilter: { + encode(message: AcceptedMessagesFilter, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AcceptedMessagesFilter; + fromPartial(object: DeepPartial): AcceptedMessagesFilter; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/genesis.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/genesis.d.ts new file mode 100644 index 00000000..525813ea --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/genesis.d.ts @@ -0,0 +1,77 @@ +/// +import { Params, ParamsSDKType, CodeInfo, CodeInfoSDKType, ContractInfo, ContractInfoSDKType, Model, ModelSDKType, ContractCodeHistoryEntry, ContractCodeHistoryEntrySDKType } from "./types"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState - genesis state of x/wasm */ +export interface GenesisState { + params?: Params; + codes: Code[]; + contracts: Contract[]; + sequences: Sequence[]; +} +/** GenesisState - genesis state of x/wasm */ +export interface GenesisStateSDKType { + params?: ParamsSDKType; + codes: CodeSDKType[]; + contracts: ContractSDKType[]; + sequences: SequenceSDKType[]; +} +/** Code struct encompasses CodeInfo and CodeBytes */ +export interface Code { + codeId: Long; + codeInfo?: CodeInfo; + codeBytes: Uint8Array; + /** Pinned to 
wasmvm cache */ + pinned: boolean; +} +/** Code struct encompasses CodeInfo and CodeBytes */ +export interface CodeSDKType { + code_id: Long; + code_info?: CodeInfoSDKType; + code_bytes: Uint8Array; + pinned: boolean; +} +/** Contract struct encompasses ContractAddress, ContractInfo, and ContractState */ +export interface Contract { + contractAddress: string; + contractInfo?: ContractInfo; + contractState: Model[]; + contractCodeHistory: ContractCodeHistoryEntry[]; +} +/** Contract struct encompasses ContractAddress, ContractInfo, and ContractState */ +export interface ContractSDKType { + contract_address: string; + contract_info?: ContractInfoSDKType; + contract_state: ModelSDKType[]; + contract_code_history: ContractCodeHistoryEntrySDKType[]; +} +/** Sequence key and value of an id generation counter */ +export interface Sequence { + idKey: Uint8Array; + value: Long; +} +/** Sequence key and value of an id generation counter */ +export interface SequenceSDKType { + id_key: Uint8Array; + value: Long; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const Code: { + encode(message: Code, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Code; + fromPartial(object: DeepPartial): Code; +}; +export declare const Contract: { + encode(message: Contract, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Contract; + fromPartial(object: DeepPartial): Contract; +}; +export declare const Sequence: { + encode(message: Sequence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Sequence; + fromPartial(object: DeepPartial): Sequence; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/ibc.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/ibc.d.ts new file mode 100644 index 00000000..8c625754 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/ibc.d.ts @@ -0,0 +1,48 @@ +/// +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** MsgIBCSend */ +export interface MsgIBCSend { + /** the channel by which the packet will be sent */ + channel: string; + /** + * Timeout height relative to the current block height. + * The timeout is disabled when set to 0. + */ + timeoutHeight: Long; + /** + * Timeout timestamp (in nanoseconds) relative to the current block timestamp. + * The timeout is disabled when set to 0. + */ + timeoutTimestamp: Long; + /** + * Data is the payload to transfer. We must not make assumption what format or + * content is in here. 
+ */ + data: Uint8Array; +} +/** MsgIBCSend */ +export interface MsgIBCSendSDKType { + channel: string; + timeout_height: Long; + timeout_timestamp: Long; + data: Uint8Array; +} +/** MsgIBCCloseChannel port and channel need to be owned by the contract */ +export interface MsgIBCCloseChannel { + channel: string; +} +/** MsgIBCCloseChannel port and channel need to be owned by the contract */ +export interface MsgIBCCloseChannelSDKType { + channel: string; +} +export declare const MsgIBCSend: { + encode(message: MsgIBCSend, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgIBCSend; + fromPartial(object: DeepPartial): MsgIBCSend; +}; +export declare const MsgIBCCloseChannel: { + encode(message: MsgIBCCloseChannel, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgIBCCloseChannel; + fromPartial(object: DeepPartial): MsgIBCCloseChannel; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/proposal.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/proposal.d.ts new file mode 100644 index 00000000..b56e560f --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/proposal.d.ts @@ -0,0 +1,439 @@ +/// +import { AccessConfig, AccessConfigSDKType } from "./types"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** StoreCodeProposal gov proposal content type to submit WASM code to the system */ +export interface StoreCodeProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + runAs: string; + /** WASMByteCode can be raw or gzip compressed */ + wasmByteCode: Uint8Array; + /** InstantiatePermission to apply on contract creation, optional */ + instantiatePermission?: AccessConfig; + /** UnpinCode code on upload, optional */ + unpinCode: boolean; + /** Source is the URL where the code is hosted */ + source: string; + /** + * Builder is the docker image used to build the code deterministically, used + * for smart contract verification + */ + builder: string; + /** + * CodeHash is the SHA256 sum of the code outputted by builder, used for smart + * contract verification + */ + codeHash: Uint8Array; +} +/** StoreCodeProposal gov proposal content type to submit WASM code to the system */ +export interface StoreCodeProposalSDKType { + title: string; + description: string; + run_as: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; + unpin_code: boolean; + source: string; + builder: string; + code_hash: Uint8Array; +} +/** + * InstantiateContractProposal gov proposal content type to instantiate a + * contract. + */ +export interface InstantiateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + runAs: string; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Label is optional metadata to be stored with a constract instance. 
*/ + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; +} +/** + * InstantiateContractProposal gov proposal content type to instantiate a + * contract. + */ +export interface InstantiateContractProposalSDKType { + title: string; + description: string; + run_as: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** + * InstantiateContract2Proposal gov proposal content type to instantiate + * contract 2 + */ +export interface InstantiateContract2Proposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** RunAs is the address that is passed to the contract's enviroment as sender */ + runAs: string; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Label is optional metadata to be stored with a constract instance. */ + label: string; + /** Msg json encode message to be passed to the contract on instantiation */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; + /** Salt is an arbitrary value provided by the sender. Size can be 1 to 64. */ + salt: Uint8Array; + /** + * FixMsg include the msg value into the hash for the predictable address. + * Default is false + */ + fixMsg: boolean; +} +/** + * InstantiateContract2Proposal gov proposal content type to instantiate + * contract 2 + */ +export interface InstantiateContract2ProposalSDKType { + title: string; + description: string; + run_as: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + salt: Uint8Array; + fix_msg: boolean; +} +/** MigrateContractProposal gov proposal content type to migrate a contract. */ +export interface MigrateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** Contract is the address of the smart contract */ + contract: string; + /** CodeID references the new WASM code */ + codeId: Long; + /** Msg json encoded message to be passed to the contract on migration */ + msg: Uint8Array; +} +/** MigrateContractProposal gov proposal content type to migrate a contract. */ +export interface MigrateContractProposalSDKType { + title: string; + description: string; + contract: string; + code_id: Long; + msg: Uint8Array; +} +/** SudoContractProposal gov proposal content type to call sudo on a contract. */ +export interface SudoContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** Contract is the address of the smart contract */ + contract: string; + /** Msg json encoded message to be passed to the contract as sudo */ + msg: Uint8Array; +} +/** SudoContractProposal gov proposal content type to call sudo on a contract. */ +export interface SudoContractProposalSDKType { + title: string; + description: string; + contract: string; + msg: Uint8Array; +} +/** + * ExecuteContractProposal gov proposal content type to call execute on a + * contract. 
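+ *
+ * Illustrative note (not part of the upstream proto comment): `msg` below is
+ * the execute payload as UTF-8 encoded JSON, whose shape depends entirely on
+ * the target contract, e.g. a cw20-style transfer:
+ *   msg: new TextEncoder().encode(JSON.stringify({ transfer: { recipient, amount: "1" } }))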
+ */ +export interface ExecuteContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + runAs: string; + /** Contract is the address of the smart contract */ + contract: string; + /** Msg json encoded message to be passed to the contract as execute */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; +} +/** + * ExecuteContractProposal gov proposal content type to call execute on a + * contract. + */ +export interface ExecuteContractProposalSDKType { + title: string; + description: string; + run_as: string; + contract: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** UpdateAdminProposal gov proposal content type to set an admin for a contract. */ +export interface UpdateAdminProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** NewAdmin address to be set */ + newAdmin: string; + /** Contract is the address of the smart contract */ + contract: string; +} +/** UpdateAdminProposal gov proposal content type to set an admin for a contract. */ +export interface UpdateAdminProposalSDKType { + title: string; + description: string; + new_admin: string; + contract: string; +} +/** + * ClearAdminProposal gov proposal content type to clear the admin of a + * contract. + */ +export interface ClearAdminProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** Contract is the address of the smart contract */ + contract: string; +} +/** + * ClearAdminProposal gov proposal content type to clear the admin of a + * contract. + */ +export interface ClearAdminProposalSDKType { + title: string; + description: string; + contract: string; +} +/** + * PinCodesProposal gov proposal content type to pin a set of code ids in the + * wasmvm cache. + */ +export interface PinCodesProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** CodeIDs references the new WASM codes */ + codeIds: Long[]; +} +/** + * PinCodesProposal gov proposal content type to pin a set of code ids in the + * wasmvm cache. + */ +export interface PinCodesProposalSDKType { + title: string; + description: string; + code_ids: Long[]; +} +/** + * UnpinCodesProposal gov proposal content type to unpin a set of code ids in + * the wasmvm cache. + */ +export interface UnpinCodesProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** CodeIDs references the WASM codes */ + codeIds: Long[]; +} +/** + * UnpinCodesProposal gov proposal content type to unpin a set of code ids in + * the wasmvm cache. + */ +export interface UnpinCodesProposalSDKType { + title: string; + description: string; + code_ids: Long[]; +} +/** + * AccessConfigUpdate contains the code id and the access config to be + * applied. + */ +export interface AccessConfigUpdate { + /** CodeID is the reference to the stored WASM code to be updated */ + codeId: Long; + /** InstantiatePermission to apply to the set of code ids */ + instantiatePermission?: AccessConfig; +} +/** + * AccessConfigUpdate contains the code id and the access config to be + * applied. 
+ */ +export interface AccessConfigUpdateSDKType { + code_id: Long; + instantiate_permission?: AccessConfigSDKType; +} +/** + * UpdateInstantiateConfigProposal gov proposal content type to update + * instantiate config to a set of code ids. + */ +export interface UpdateInstantiateConfigProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** + * AccessConfigUpdate contains the list of code ids and the access config + * to be applied. + */ + accessConfigUpdates: AccessConfigUpdate[]; +} +/** + * UpdateInstantiateConfigProposal gov proposal content type to update + * instantiate config to a set of code ids. + */ +export interface UpdateInstantiateConfigProposalSDKType { + title: string; + description: string; + access_config_updates: AccessConfigUpdateSDKType[]; +} +/** + * StoreAndInstantiateContractProposal gov proposal content type to store + * and instantiate the contract. + */ +export interface StoreAndInstantiateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + runAs: string; + /** WASMByteCode can be raw or gzip compressed */ + wasmByteCode: Uint8Array; + /** InstantiatePermission to apply on contract creation, optional */ + instantiatePermission?: AccessConfig; + /** UnpinCode code on upload, optional */ + unpinCode: boolean; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** Label is optional metadata to be stored with a constract instance. */ + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; + /** Source is the URL where the code is hosted */ + source: string; + /** + * Builder is the docker image used to build the code deterministically, used + * for smart contract verification + */ + builder: string; + /** + * CodeHash is the SHA256 sum of the code outputted by builder, used for smart + * contract verification + */ + codeHash: Uint8Array; +} +/** + * StoreAndInstantiateContractProposal gov proposal content type to store + * and instantiate the contract. 
+ */ +export interface StoreAndInstantiateContractProposalSDKType { + title: string; + description: string; + run_as: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; + unpin_code: boolean; + admin: string; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + source: string; + builder: string; + code_hash: Uint8Array; +} +export declare const StoreCodeProposal: { + encode(message: StoreCodeProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StoreCodeProposal; + fromPartial(object: DeepPartial): StoreCodeProposal; +}; +export declare const InstantiateContractProposal: { + encode(message: InstantiateContractProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InstantiateContractProposal; + fromPartial(object: DeepPartial): InstantiateContractProposal; +}; +export declare const InstantiateContract2Proposal: { + encode(message: InstantiateContract2Proposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): InstantiateContract2Proposal; + fromPartial(object: DeepPartial): InstantiateContract2Proposal; +}; +export declare const MigrateContractProposal: { + encode(message: MigrateContractProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MigrateContractProposal; + fromPartial(object: DeepPartial): MigrateContractProposal; +}; +export declare const SudoContractProposal: { + encode(message: SudoContractProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SudoContractProposal; + fromPartial(object: DeepPartial): SudoContractProposal; +}; +export declare const ExecuteContractProposal: { + encode(message: ExecuteContractProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ExecuteContractProposal; + fromPartial(object: DeepPartial): ExecuteContractProposal; +}; +export declare const UpdateAdminProposal: { + encode(message: UpdateAdminProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UpdateAdminProposal; + fromPartial(object: DeepPartial): UpdateAdminProposal; +}; +export declare const ClearAdminProposal: { + encode(message: ClearAdminProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClearAdminProposal; + fromPartial(object: DeepPartial): ClearAdminProposal; +}; +export declare const PinCodesProposal: { + encode(message: PinCodesProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PinCodesProposal; + fromPartial(object: DeepPartial): PinCodesProposal; +}; +export declare const UnpinCodesProposal: { + encode(message: UnpinCodesProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UnpinCodesProposal; + fromPartial(object: DeepPartial): UnpinCodesProposal; +}; +export declare const AccessConfigUpdate: { + encode(message: AccessConfigUpdate, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AccessConfigUpdate; + fromPartial(object: DeepPartial): AccessConfigUpdate; +}; +export declare const UpdateInstantiateConfigProposal: { + encode(message: UpdateInstantiateConfigProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UpdateInstantiateConfigProposal; + fromPartial(object: 
DeepPartial): UpdateInstantiateConfigProposal; +}; +export declare const StoreAndInstantiateContractProposal: { + encode(message: StoreAndInstantiateContractProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): StoreAndInstantiateContractProposal; + fromPartial(object: DeepPartial): StoreAndInstantiateContractProposal; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/query.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/query.d.ts new file mode 100644 index 00000000..3a446784 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/query.d.ts @@ -0,0 +1,461 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../cosmos/base/query/v1beta1/pagination"; +import { ContractInfo, ContractInfoSDKType, ContractCodeHistoryEntry, ContractCodeHistoryEntrySDKType, Model, ModelSDKType, AccessConfig, AccessConfigSDKType, Params, ParamsSDKType } from "./types"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * QueryContractInfoRequest is the request type for the Query/ContractInfo RPC + * method + */ +export interface QueryContractInfoRequest { + /** address is the address of the contract to query */ + address: string; +} +/** + * QueryContractInfoRequest is the request type for the Query/ContractInfo RPC + * method + */ +export interface QueryContractInfoRequestSDKType { + address: string; +} +/** + * QueryContractInfoResponse is the response type for the Query/ContractInfo RPC + * method + */ +export interface QueryContractInfoResponse { + /** address is the address of the contract */ + address: string; + contractInfo?: ContractInfo; +} +/** + * QueryContractInfoResponse is the response type for the Query/ContractInfo RPC + * method + */ +export interface QueryContractInfoResponseSDKType { + address: string; + contract_info?: ContractInfoSDKType; +} +/** + * QueryContractHistoryRequest is the request type for the Query/ContractHistory + * RPC method + */ +export interface QueryContractHistoryRequest { + /** address is the address of the contract to query */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryContractHistoryRequest is the request type for the Query/ContractHistory + * RPC method + */ +export interface QueryContractHistoryRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryContractHistoryResponse is the response type for the + * Query/ContractHistory RPC method + */ +export interface QueryContractHistoryResponse { + entries: ContractCodeHistoryEntry[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryContractHistoryResponse is the response type for the + * Query/ContractHistory RPC method + */ +export interface QueryContractHistoryResponseSDKType { + entries: ContractCodeHistoryEntrySDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryContractsByCodeRequest is the request type for the Query/ContractsByCode + * RPC method + */ +export interface QueryContractsByCodeRequest { + /** + * grpc-gateway_out does not support Go style CodID + * pagination defines an optional pagination for the request. 
+ */ + codeId: Long; + pagination?: PageRequest; +} +/** + * QueryContractsByCodeRequest is the request type for the Query/ContractsByCode + * RPC method + */ +export interface QueryContractsByCodeRequestSDKType { + code_id: Long; + pagination?: PageRequestSDKType; +} +/** + * QueryContractsByCodeResponse is the response type for the + * Query/ContractsByCode RPC method + */ +export interface QueryContractsByCodeResponse { + /** contracts are a set of contract addresses */ + contracts: string[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryContractsByCodeResponse is the response type for the + * Query/ContractsByCode RPC method + */ +export interface QueryContractsByCodeResponseSDKType { + contracts: string[]; + pagination?: PageResponseSDKType; +} +/** + * QueryAllContractStateRequest is the request type for the + * Query/AllContractState RPC method + */ +export interface QueryAllContractStateRequest { + /** address is the address of the contract */ + address: string; + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryAllContractStateRequest is the request type for the + * Query/AllContractState RPC method + */ +export interface QueryAllContractStateRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryAllContractStateResponse is the response type for the + * Query/AllContractState RPC method + */ +export interface QueryAllContractStateResponse { + models: Model[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryAllContractStateResponse is the response type for the + * Query/AllContractState RPC method + */ +export interface QueryAllContractStateResponseSDKType { + models: ModelSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryRawContractStateRequest is the request type for the + * Query/RawContractState RPC method + */ +export interface QueryRawContractStateRequest { + /** address is the address of the contract */ + address: string; + queryData: Uint8Array; +} +/** + * QueryRawContractStateRequest is the request type for the + * Query/RawContractState RPC method + */ +export interface QueryRawContractStateRequestSDKType { + address: string; + query_data: Uint8Array; +} +/** + * QueryRawContractStateResponse is the response type for the + * Query/RawContractState RPC method + */ +export interface QueryRawContractStateResponse { + /** Data contains the raw store data */ + data: Uint8Array; +} +/** + * QueryRawContractStateResponse is the response type for the + * Query/RawContractState RPC method + */ +export interface QueryRawContractStateResponseSDKType { + data: Uint8Array; +} +/** + * QuerySmartContractStateRequest is the request type for the + * Query/SmartContractState RPC method + */ +export interface QuerySmartContractStateRequest { + /** address is the address of the contract */ + address: string; + /** QueryData contains the query data passed to the contract */ + queryData: Uint8Array; +} +/** + * QuerySmartContractStateRequest is the request type for the + * Query/SmartContractState RPC method + */ +export interface QuerySmartContractStateRequestSDKType { + address: string; + query_data: Uint8Array; +} +/** + * QuerySmartContractStateResponse is the response type for the + * Query/SmartContractState RPC method + */ +export interface QuerySmartContractStateResponse { + /** Data contains the json data returned from the smart contract */ + data: Uint8Array; 
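+ /**
+ * Illustrative note (not generated output): since this is the contract's
+ * JSON response as raw bytes, callers typically decode it with something
+ * like JSON.parse(new TextDecoder().decode(data)).
+ */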
+} +/** + * QuerySmartContractStateResponse is the response type for the + * Query/SmartContractState RPC method + */ +export interface QuerySmartContractStateResponseSDKType { + data: Uint8Array; +} +/** QueryCodeRequest is the request type for the Query/Code RPC method */ +export interface QueryCodeRequest { + /** grpc-gateway_out does not support Go style CodID */ + codeId: Long; +} +/** QueryCodeRequest is the request type for the Query/Code RPC method */ +export interface QueryCodeRequestSDKType { + code_id: Long; +} +/** CodeInfoResponse contains code meta data from CodeInfo */ +export interface CodeInfoResponse { + codeId: Long; + creator: string; + dataHash: Uint8Array; + instantiatePermission?: AccessConfig; +} +/** CodeInfoResponse contains code meta data from CodeInfo */ +export interface CodeInfoResponseSDKType { + code_id: Long; + creator: string; + data_hash: Uint8Array; + instantiate_permission?: AccessConfigSDKType; +} +/** QueryCodeResponse is the response type for the Query/Code RPC method */ +export interface QueryCodeResponse { + codeInfo?: CodeInfoResponse; + data: Uint8Array; +} +/** QueryCodeResponse is the response type for the Query/Code RPC method */ +export interface QueryCodeResponseSDKType { + code_info?: CodeInfoResponseSDKType; + data: Uint8Array; +} +/** QueryCodesRequest is the request type for the Query/Codes RPC method */ +export interface QueryCodesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryCodesRequest is the request type for the Query/Codes RPC method */ +export interface QueryCodesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryCodesResponse is the response type for the Query/Codes RPC method */ +export interface QueryCodesResponse { + codeInfos: CodeInfoResponse[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** QueryCodesResponse is the response type for the Query/Codes RPC method */ +export interface QueryCodesResponseSDKType { + code_infos: CodeInfoResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryPinnedCodesRequest is the request type for the Query/PinnedCodes + * RPC method + */ +export interface QueryPinnedCodesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryPinnedCodesRequest is the request type for the Query/PinnedCodes + * RPC method + */ +export interface QueryPinnedCodesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryPinnedCodesResponse is the response type for the + * Query/PinnedCodes RPC method + */ +export interface QueryPinnedCodesResponse { + codeIds: Long[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryPinnedCodesResponse is the response type for the + * Query/PinnedCodes RPC method + */ +export interface QueryPinnedCodesResponseSDKType { + code_ids: Long[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. 
*/ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryContractsByCreatorRequest is the request type for the + * Query/ContractsByCreator RPC method. + */ +export interface QueryContractsByCreatorRequest { + /** CreatorAddress is the address of contract creator */ + creatorAddress: string; + /** Pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryContractsByCreatorRequest is the request type for the + * Query/ContractsByCreator RPC method. + */ +export interface QueryContractsByCreatorRequestSDKType { + creator_address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryContractsByCreatorResponse is the response type for the + * Query/ContractsByCreator RPC method. + */ +export interface QueryContractsByCreatorResponse { + /** ContractAddresses result set */ + contractAddresses: string[]; + /** Pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryContractsByCreatorResponse is the response type for the + * Query/ContractsByCreator RPC method. + */ +export interface QueryContractsByCreatorResponseSDKType { + contract_addresses: string[]; + pagination?: PageResponseSDKType; +} +export declare const QueryContractInfoRequest: { + encode(message: QueryContractInfoRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractInfoRequest; + fromPartial(object: DeepPartial): QueryContractInfoRequest; +}; +export declare const QueryContractInfoResponse: { + encode(message: QueryContractInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractInfoResponse; + fromPartial(object: DeepPartial): QueryContractInfoResponse; +}; +export declare const QueryContractHistoryRequest: { + encode(message: QueryContractHistoryRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractHistoryRequest; + fromPartial(object: DeepPartial): QueryContractHistoryRequest; +}; +export declare const QueryContractHistoryResponse: { + encode(message: QueryContractHistoryResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractHistoryResponse; + fromPartial(object: DeepPartial): QueryContractHistoryResponse; +}; +export declare const QueryContractsByCodeRequest: { + encode(message: QueryContractsByCodeRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCodeRequest; + fromPartial(object: DeepPartial): QueryContractsByCodeRequest; +}; +export declare const QueryContractsByCodeResponse: { + encode(message: QueryContractsByCodeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCodeResponse; + fromPartial(object: DeepPartial): QueryContractsByCodeResponse; +}; +export declare const QueryAllContractStateRequest: { + encode(message: QueryAllContractStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllContractStateRequest; + fromPartial(object: DeepPartial): QueryAllContractStateRequest; +}; +export declare const QueryAllContractStateResponse: { + encode(message: QueryAllContractStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllContractStateResponse; + fromPartial(object: DeepPartial): 
QueryAllContractStateResponse; +}; +export declare const QueryRawContractStateRequest: { + encode(message: QueryRawContractStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRawContractStateRequest; + fromPartial(object: DeepPartial): QueryRawContractStateRequest; +}; +export declare const QueryRawContractStateResponse: { + encode(message: QueryRawContractStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRawContractStateResponse; + fromPartial(object: DeepPartial): QueryRawContractStateResponse; +}; +export declare const QuerySmartContractStateRequest: { + encode(message: QuerySmartContractStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySmartContractStateRequest; + fromPartial(object: DeepPartial): QuerySmartContractStateRequest; +}; +export declare const QuerySmartContractStateResponse: { + encode(message: QuerySmartContractStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySmartContractStateResponse; + fromPartial(object: DeepPartial): QuerySmartContractStateResponse; +}; +export declare const QueryCodeRequest: { + encode(message: QueryCodeRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodeRequest; + fromPartial(object: DeepPartial): QueryCodeRequest; +}; +export declare const CodeInfoResponse: { + encode(message: CodeInfoResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CodeInfoResponse; + fromPartial(object: DeepPartial): CodeInfoResponse; +}; +export declare const QueryCodeResponse: { + encode(message: QueryCodeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodeResponse; + fromPartial(object: DeepPartial): QueryCodeResponse; +}; +export declare const QueryCodesRequest: { + encode(message: QueryCodesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodesRequest; + fromPartial(object: DeepPartial): QueryCodesRequest; +}; +export declare const QueryCodesResponse: { + encode(message: QueryCodesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodesResponse; + fromPartial(object: DeepPartial): QueryCodesResponse; +}; +export declare const QueryPinnedCodesRequest: { + encode(message: QueryPinnedCodesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPinnedCodesRequest; + fromPartial(object: DeepPartial): QueryPinnedCodesRequest; +}; +export declare const QueryPinnedCodesResponse: { + encode(message: QueryPinnedCodesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPinnedCodesResponse; + fromPartial(object: DeepPartial): QueryPinnedCodesResponse; +}; +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; +export 
declare const QueryContractsByCreatorRequest: { + encode(message: QueryContractsByCreatorRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCreatorRequest; + fromPartial(object: DeepPartial<QueryContractsByCreatorRequest>): QueryContractsByCreatorRequest; +}; +export declare const QueryContractsByCreatorResponse: { + encode(message: QueryContractsByCreatorResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCreatorResponse; + fromPartial(object: DeepPartial<QueryContractsByCreatorResponse>): QueryContractsByCreatorResponse; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/query.lcd.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/query.lcd.d.ts new file mode 100644 index 00000000..b897bd38 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/query.lcd.d.ts @@ -0,0 +1,19 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryContractInfoRequest, QueryContractInfoResponseSDKType, QueryContractHistoryRequest, QueryContractHistoryResponseSDKType, QueryContractsByCodeRequest, QueryContractsByCodeResponseSDKType, QueryAllContractStateRequest, QueryAllContractStateResponseSDKType, QueryRawContractStateRequest, QueryRawContractStateResponseSDKType, QuerySmartContractStateRequest, QuerySmartContractStateResponseSDKType, QueryCodeRequest, QueryCodeResponseSDKType, QueryCodesRequest, QueryCodesResponseSDKType, QueryPinnedCodesRequest, QueryPinnedCodesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryContractsByCreatorRequest, QueryContractsByCreatorResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + contractInfo(params: QueryContractInfoRequest): Promise<QueryContractInfoResponseSDKType>; + contractHistory(params: QueryContractHistoryRequest): Promise<QueryContractHistoryResponseSDKType>; + contractsByCode(params: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponseSDKType>; + allContractState(params: QueryAllContractStateRequest): Promise<QueryAllContractStateResponseSDKType>; + rawContractState(params: QueryRawContractStateRequest): Promise<QueryRawContractStateResponseSDKType>; + smartContractState(params: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponseSDKType>; + code(params: QueryCodeRequest): Promise<QueryCodeResponseSDKType>; + codes(params?: QueryCodesRequest): Promise<QueryCodesResponseSDKType>; + pinnedCodes(params?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponseSDKType>; + params(_params?: QueryParamsRequest): Promise<QueryParamsResponseSDKType>; + contractsByCreator(params: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponseSDKType>; +} diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/query.rpc.Query.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..afcb1b95 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/query.rpc.Query.d.ts @@ -0,0 +1,56 @@ +import { Rpc } from "../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryContractInfoRequest, QueryContractInfoResponse, QueryContractHistoryRequest, QueryContractHistoryResponse, QueryContractsByCodeRequest, QueryContractsByCodeResponse, QueryAllContractStateRequest, QueryAllContractStateResponse, QueryRawContractStateRequest, QueryRawContractStateResponse, QuerySmartContractStateRequest, QuerySmartContractStateResponse, QueryCodeRequest, QueryCodeResponse, QueryCodesRequest, QueryCodesResponse, QueryPinnedCodesRequest, QueryPinnedCodesResponse, QueryParamsRequest, QueryParamsResponse, QueryContractsByCreatorRequest, QueryContractsByCreatorResponse } from "./query"; +/** Query provides defines the gRPC querier service */ +export interface Query { + /** ContractInfo gets the contract meta data */ + contractInfo(request:
QueryContractInfoRequest): Promise<QueryContractInfoResponse>; + /** ContractHistory gets the contract code history */ + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse>; + /** ContractsByCode lists all smart contracts for a code id */ + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse>; + /** AllContractState gets all raw store data for a single contract */ + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse>; + /** RawContractState gets single key from the raw store data of a contract */ + rawContractState(request: QueryRawContractStateRequest): Promise<QueryRawContractStateResponse>; + /** SmartContractState gets smart query result from the contract */ + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse>; + /** Code gets the binary code and metadata for a single wasm code */ + code(request: QueryCodeRequest): Promise<QueryCodeResponse>; + /** Codes gets the metadata for all stored wasm codes */ + codes(request?: QueryCodesRequest): Promise<QueryCodesResponse>; + /** PinnedCodes gets the pinned code ids */ + pinnedCodes(request?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponse>; + /** Params gets the module params */ + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + /** ContractsByCreator gets the contracts by creator */ + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + contractInfo(request: QueryContractInfoRequest): Promise<QueryContractInfoResponse>; + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse>; + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse>; + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse>; + rawContractState(request: QueryRawContractStateRequest): Promise<QueryRawContractStateResponse>; + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse>; + code(request: QueryCodeRequest): Promise<QueryCodeResponse>; + codes(request?: QueryCodesRequest): Promise<QueryCodesResponse>; + pinnedCodes(request?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponse>; + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + contractInfo(request: QueryContractInfoRequest): Promise<QueryContractInfoResponse>; + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse>; + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse>; + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse>; + rawContractState(request: QueryRawContractStateRequest): Promise<QueryRawContractStateResponse>; + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse>; + code(request: QueryCodeRequest): Promise<QueryCodeResponse>; + codes(request?: QueryCodesRequest): Promise<QueryCodesResponse>; + pinnedCodes(request?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponse>; + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse>; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/tx.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/tx.d.ts new file mode 100644 index 00000000..3b41b3cf --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/tx.d.ts @@ -0,0 +1,294 @@ +/// +import { AccessConfig, AccessConfigSDKType } from "./types"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** MsgStoreCode submit Wasm code to the system */ +export interface MsgStoreCode { + /** Sender is the that actor that signed the messages */ + sender: string; + /** WASMByteCode can be raw or gzip compressed */ + wasmByteCode: Uint8Array; + /** + * InstantiatePermission access
control to apply on contract creation, + * optional + */ + instantiatePermission?: AccessConfig; +} +/** MsgStoreCode submit Wasm code to the system */ +export interface MsgStoreCodeSDKType { + sender: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; +} +/** MsgStoreCodeResponse returns store result data. */ +export interface MsgStoreCodeResponse { + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Checksum is the sha256 hash of the stored code */ + checksum: Uint8Array; +} +/** MsgStoreCodeResponse returns store result data. */ +export interface MsgStoreCodeResponseSDKType { + code_id: Long; + checksum: Uint8Array; +} +/** + * MsgInstantiateContract create a new smart contract instance for the given + * code id. + */ +export interface MsgInstantiateContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Label is optional metadata to be stored with a contract instance. */ + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; +} +/** + * MsgInstantiateContract create a new smart contract instance for the given + * code id. + */ +export interface MsgInstantiateContractSDKType { + sender: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** + * MsgInstantiateContract2 create a new smart contract instance for the given + * code id with a predicable address. + */ +export interface MsgInstantiateContract2 { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Label is optional metadata to be stored with a contract instance. */ + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + funds: Coin[]; + /** Salt is an arbitrary value provided by the sender. Size can be 1 to 64. */ + salt: Uint8Array; + /** + * FixMsg include the msg value into the hash for the predictable address. + * Default is false + */ + fixMsg: boolean; +} +/** + * MsgInstantiateContract2 create a new smart contract instance for the given + * code id with a predicable address. + */ +export interface MsgInstantiateContract2SDKType { + sender: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + salt: Uint8Array; + fix_msg: boolean; +} +/** MsgInstantiateContractResponse return instantiation result data */ +export interface MsgInstantiateContractResponse { + /** Address is the bech32 address of the new contract instance. */ + address: string; + /** Data contains bytes to returned from the contract */ + data: Uint8Array; +} +/** MsgInstantiateContractResponse return instantiation result data */ +export interface MsgInstantiateContractResponseSDKType { + address: string; + data: Uint8Array; +} +/** MsgInstantiateContract2Response return instantiation result data */ +export interface MsgInstantiateContract2Response { + /** Address is the bech32 address of the new contract instance. 
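+ *
+ * Illustrative note (not part of the upstream proto comment): for
+ * instantiate2 this address is derived deterministically from the code
+ * checksum, creator and salt, so it can be precomputed off-chain (for
+ * example with the instantiate2Address helper in @cosmjs/cosmwasm-stargate,
+ * assuming that package is available).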
*/ + address: string; + /** Data contains bytes to returned from the contract */ + data: Uint8Array; +} +/** MsgInstantiateContract2Response return instantiation result data */ +export interface MsgInstantiateContract2ResponseSDKType { + address: string; + data: Uint8Array; +} +/** MsgExecuteContract submits the given message data to a smart contract */ +export interface MsgExecuteContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + contract: string; + /** Msg json encoded message to be passed to the contract */ + msg: Uint8Array; + /** Funds coins that are transferred to the contract on execution */ + funds: Coin[]; +} +/** MsgExecuteContract submits the given message data to a smart contract */ +export interface MsgExecuteContractSDKType { + sender: string; + contract: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** MsgExecuteContractResponse returns execution result data. */ +export interface MsgExecuteContractResponse { + /** Data contains bytes to returned from the contract */ + data: Uint8Array; +} +/** MsgExecuteContractResponse returns execution result data. */ +export interface MsgExecuteContractResponseSDKType { + data: Uint8Array; +} +/** MsgMigrateContract runs a code upgrade/ downgrade for a smart contract */ +export interface MsgMigrateContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + contract: string; + /** CodeID references the new WASM code */ + codeId: Long; + /** Msg json encoded message to be passed to the contract on migration */ + msg: Uint8Array; +} +/** MsgMigrateContract runs a code upgrade/ downgrade for a smart contract */ +export interface MsgMigrateContractSDKType { + sender: string; + contract: string; + code_id: Long; + msg: Uint8Array; +} +/** MsgMigrateContractResponse returns contract migration result data. */ +export interface MsgMigrateContractResponse { + /** + * Data contains same raw bytes returned as data from the wasm contract. + * (May be empty) + */ + data: Uint8Array; +} +/** MsgMigrateContractResponse returns contract migration result data. 
*/ +export interface MsgMigrateContractResponseSDKType { + data: Uint8Array; +} +/** MsgUpdateAdmin sets a new admin for a smart contract */ +export interface MsgUpdateAdmin { + /** Sender is the that actor that signed the messages */ + sender: string; + /** NewAdmin address to be set */ + newAdmin: string; + /** Contract is the address of the smart contract */ + contract: string; +} +/** MsgUpdateAdmin sets a new admin for a smart contract */ +export interface MsgUpdateAdminSDKType { + sender: string; + new_admin: string; + contract: string; +} +/** MsgUpdateAdminResponse returns empty data */ +export interface MsgUpdateAdminResponse { +} +/** MsgUpdateAdminResponse returns empty data */ +export interface MsgUpdateAdminResponseSDKType { +} +/** MsgClearAdmin removes any admin stored for a smart contract */ +export interface MsgClearAdmin { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + contract: string; +} +/** MsgClearAdmin removes any admin stored for a smart contract */ +export interface MsgClearAdminSDKType { + sender: string; + contract: string; +} +/** MsgClearAdminResponse returns empty data */ +export interface MsgClearAdminResponse { +} +/** MsgClearAdminResponse returns empty data */ +export interface MsgClearAdminResponseSDKType { +} +export declare const MsgStoreCode: { + encode(message: MsgStoreCode, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgStoreCode; + fromPartial(object: DeepPartial): MsgStoreCode; +}; +export declare const MsgStoreCodeResponse: { + encode(message: MsgStoreCodeResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgStoreCodeResponse; + fromPartial(object: DeepPartial): MsgStoreCodeResponse; +}; +export declare const MsgInstantiateContract: { + encode(message: MsgInstantiateContract, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract; + fromPartial(object: DeepPartial): MsgInstantiateContract; +}; +export declare const MsgInstantiateContract2: { + encode(message: MsgInstantiateContract2, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract2; + fromPartial(object: DeepPartial): MsgInstantiateContract2; +}; +export declare const MsgInstantiateContractResponse: { + encode(message: MsgInstantiateContractResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContractResponse; + fromPartial(object: DeepPartial): MsgInstantiateContractResponse; +}; +export declare const MsgInstantiateContract2Response: { + encode(message: MsgInstantiateContract2Response, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract2Response; + fromPartial(object: DeepPartial): MsgInstantiateContract2Response; +}; +export declare const MsgExecuteContract: { + encode(message: MsgExecuteContract, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecuteContract; + fromPartial(object: DeepPartial): MsgExecuteContract; +}; +export declare const MsgExecuteContractResponse: { + encode(message: MsgExecuteContractResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecuteContractResponse; + fromPartial(object: DeepPartial): MsgExecuteContractResponse; +}; +export 
declare const MsgMigrateContract: { + encode(message: MsgMigrateContract, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMigrateContract; + fromPartial(object: DeepPartial): MsgMigrateContract; +}; +export declare const MsgMigrateContractResponse: { + encode(message: MsgMigrateContractResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMigrateContractResponse; + fromPartial(object: DeepPartial): MsgMigrateContractResponse; +}; +export declare const MsgUpdateAdmin: { + encode(message: MsgUpdateAdmin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAdmin; + fromPartial(object: DeepPartial): MsgUpdateAdmin; +}; +export declare const MsgUpdateAdminResponse: { + encode(_: MsgUpdateAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAdminResponse; + fromPartial(_: DeepPartial): MsgUpdateAdminResponse; +}; +export declare const MsgClearAdmin: { + encode(message: MsgClearAdmin, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgClearAdmin; + fromPartial(object: DeepPartial): MsgClearAdmin; +}; +export declare const MsgClearAdminResponse: { + encode(_: MsgClearAdminResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgClearAdminResponse; + fromPartial(_: DeepPartial): MsgClearAdminResponse; +}; diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..88d6e1a7 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/tx.rpc.msg.d.ts @@ -0,0 +1,36 @@ +import { Rpc } from "../../../helpers"; +import { MsgStoreCode, MsgStoreCodeResponse, MsgInstantiateContract, MsgInstantiateContractResponse, MsgInstantiateContract2, MsgInstantiateContract2Response, MsgExecuteContract, MsgExecuteContractResponse, MsgMigrateContract, MsgMigrateContractResponse, MsgUpdateAdmin, MsgUpdateAdminResponse, MsgClearAdmin, MsgClearAdminResponse } from "./tx"; +/** Msg defines the wasm Msg service. */ +export interface Msg { + /** StoreCode to submit Wasm code to the system */ + storeCode(request: MsgStoreCode): Promise; + /** + * InstantiateContract creates a new smart contract instance for the given + * code id. 
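+ *
+ * Minimal usage sketch (illustrative only; assumes an `Rpc` implementation
+ * named `rpc`, a Long `codeId` and a bech32 `sender`, none shown here):
+ *   const msgs = new MsgClientImpl(rpc);
+ *   const res = await msgs.instantiateContract({ sender, admin: "", codeId, label: "demo", msg: new TextEncoder().encode("{}"), funds: [] });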
+ */ + instantiateContract(request: MsgInstantiateContract): Promise<MsgInstantiateContractResponse>; + /** + * InstantiateContract2 creates a new smart contract instance for the given + * code id with a predictable address + */ + instantiateContract2(request: MsgInstantiateContract2): Promise<MsgInstantiateContract2Response>; + /** Execute submits the given message data to a smart contract */ + executeContract(request: MsgExecuteContract): Promise<MsgExecuteContractResponse>; + /** Migrate runs a code upgrade/ downgrade for a smart contract */ + migrateContract(request: MsgMigrateContract): Promise<MsgMigrateContractResponse>; + /** UpdateAdmin sets a new admin for a smart contract */ + updateAdmin(request: MsgUpdateAdmin): Promise<MsgUpdateAdminResponse>; + /** ClearAdmin removes any admin stored for a smart contract */ + clearAdmin(request: MsgClearAdmin): Promise<MsgClearAdminResponse>; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + storeCode(request: MsgStoreCode): Promise<MsgStoreCodeResponse>; + instantiateContract(request: MsgInstantiateContract): Promise<MsgInstantiateContractResponse>; + instantiateContract2(request: MsgInstantiateContract2): Promise<MsgInstantiateContract2Response>; + executeContract(request: MsgExecuteContract): Promise<MsgExecuteContractResponse>; + migrateContract(request: MsgMigrateContract): Promise<MsgMigrateContractResponse>; + updateAdmin(request: MsgUpdateAdmin): Promise<MsgUpdateAdminResponse>; + clearAdmin(request: MsgClearAdmin): Promise<MsgClearAdminResponse>; +} diff --git a/packages/codegen/dist/cosmwasm/wasm/v1/types.d.ts b/packages/codegen/dist/cosmwasm/wasm/v1/types.d.ts new file mode 100644 index 00000000..ef669234 --- /dev/null +++ b/packages/codegen/dist/cosmwasm/wasm/v1/types.d.ts @@ -0,0 +1,206 @@ +/// +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** AccessType permission types */ +export declare enum AccessType { + /** ACCESS_TYPE_UNSPECIFIED - AccessTypeUnspecified placeholder for empty value */ + ACCESS_TYPE_UNSPECIFIED = 0, + /** ACCESS_TYPE_NOBODY - AccessTypeNobody forbidden */ + ACCESS_TYPE_NOBODY = 1, + /** + * ACCESS_TYPE_ONLY_ADDRESS - AccessTypeOnlyAddress restricted to a single address + * Deprecated: use AccessTypeAnyOfAddresses instead + */ + ACCESS_TYPE_ONLY_ADDRESS = 2, + /** ACCESS_TYPE_EVERYBODY - AccessTypeEverybody unrestricted */ + ACCESS_TYPE_EVERYBODY = 3, + /** ACCESS_TYPE_ANY_OF_ADDRESSES - AccessTypeAnyOfAddresses allow any of the addresses */ + ACCESS_TYPE_ANY_OF_ADDRESSES = 4, + UNRECOGNIZED = -1 +} +export declare const AccessTypeSDKType: typeof AccessType; +export declare function accessTypeFromJSON(object: any): AccessType; +export declare function accessTypeToJSON(object: AccessType): string; +/** ContractCodeHistoryOperationType actions that caused a code change */ +export declare enum ContractCodeHistoryOperationType { + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED - ContractCodeHistoryOperationTypeUnspecified placeholder for empty value */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED = 0, + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT - ContractCodeHistoryOperationTypeInit on chain contract instantiation */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT = 1, + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE - ContractCodeHistoryOperationTypeMigrate code migration */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE = 2, + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS - ContractCodeHistoryOperationTypeGenesis based on genesis data */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS = 3, + UNRECOGNIZED = -1 +} +export declare const ContractCodeHistoryOperationTypeSDKType: typeof ContractCodeHistoryOperationType; +export declare function
contractCodeHistoryOperationTypeFromJSON(object: any): ContractCodeHistoryOperationType; +export declare function contractCodeHistoryOperationTypeToJSON(object: ContractCodeHistoryOperationType): string; +/** AccessTypeParam */ +export interface AccessTypeParam { + value: AccessType; +} +/** AccessTypeParam */ +export interface AccessTypeParamSDKType { + value: AccessType; +} +/** AccessConfig access control type. */ +export interface AccessConfig { + permission: AccessType; + /** + * Address + * Deprecated: replaced by addresses + */ + address: string; + addresses: string[]; +} +/** AccessConfig access control type. */ +export interface AccessConfigSDKType { + permission: AccessType; + address: string; + addresses: string[]; +} +/** Params defines the set of wasm parameters. */ +export interface Params { + codeUploadAccess?: AccessConfig; + instantiateDefaultPermission: AccessType; +} +/** Params defines the set of wasm parameters. */ +export interface ParamsSDKType { + code_upload_access?: AccessConfigSDKType; + instantiate_default_permission: AccessType; +} +/** CodeInfo is data for the uploaded contract WASM code */ +export interface CodeInfo { + /** CodeHash is the unique identifier created by wasmvm */ + codeHash: Uint8Array; + /** Creator address who initially stored the code */ + creator: string; + /** InstantiateConfig access control to apply on contract creation, optional */ + instantiateConfig?: AccessConfig; +} +/** CodeInfo is data for the uploaded contract WASM code */ +export interface CodeInfoSDKType { + code_hash: Uint8Array; + creator: string; + instantiate_config?: AccessConfigSDKType; +} +/** ContractInfo stores a WASM contract instance */ +export interface ContractInfo { + /** CodeID is the reference to the stored Wasm code */ + codeId: Long; + /** Creator address who initially instantiated the contract */ + creator: string; + /** Admin is an optional address that can execute migrations */ + admin: string; + /** Label is optional metadata to be stored with a contract instance. */ + label: string; + /** Created Tx position when the contract was instantiated. */ + created?: AbsoluteTxPosition; + ibcPortId: string; + /** + * Extension is an extension point to store custom metadata within the + * persistence model. + */ + extension?: Any; +} +/** ContractInfo stores a WASM contract instance */ +export interface ContractInfoSDKType { + code_id: Long; + creator: string; + admin: string; + label: string; + created?: AbsoluteTxPositionSDKType; + ibc_port_id: string; + extension?: AnySDKType; +} +/** ContractCodeHistoryEntry metadata to a contract. */ +export interface ContractCodeHistoryEntry { + operation: ContractCodeHistoryOperationType; + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Updated Tx position when the operation was executed. */ + updated?: AbsoluteTxPosition; + msg: Uint8Array; +} +/** ContractCodeHistoryEntry metadata to a contract. */ +export interface ContractCodeHistoryEntrySDKType { + operation: ContractCodeHistoryOperationType; + code_id: Long; + updated?: AbsoluteTxPositionSDKType; + msg: Uint8Array; +} +/** + * AbsoluteTxPosition is a unique transaction position that allows for global + * ordering of transactions. 
+ */ +export interface AbsoluteTxPosition { + /** BlockHeight is the block the contract was created at */ + blockHeight: Long; + /** + * TxIndex is a monotonic counter within the block (actual transaction index, + * or gas consumed) + */ + txIndex: Long; +} +/** + * AbsoluteTxPosition is a unique transaction position that allows for global + * ordering of transactions. + */ +export interface AbsoluteTxPositionSDKType { + block_height: Long; + tx_index: Long; +} +/** Model is a struct that holds a KV pair */ +export interface Model { + /** hex-encode key to read it better (this is often ascii) */ + key: Uint8Array; + /** base64-encode raw value */ + value: Uint8Array; +} +/** Model is a struct that holds a KV pair */ +export interface ModelSDKType { + key: Uint8Array; + value: Uint8Array; +} +export declare const AccessTypeParam: { + encode(message: AccessTypeParam, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AccessTypeParam; + fromPartial(object: DeepPartial<AccessTypeParam>): AccessTypeParam; +}; +export declare const AccessConfig: { + encode(message: AccessConfig, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AccessConfig; + fromPartial(object: DeepPartial<AccessConfig>): AccessConfig; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial<Params>): Params; +}; +export declare const CodeInfo: { + encode(message: CodeInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CodeInfo; + fromPartial(object: DeepPartial<CodeInfo>): CodeInfo; +}; +export declare const ContractInfo: { + encode(message: ContractInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContractInfo; + fromPartial(object: DeepPartial<ContractInfo>): ContractInfo; +}; +export declare const ContractCodeHistoryEntry: { + encode(message: ContractCodeHistoryEntry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ContractCodeHistoryEntry; + fromPartial(object: DeepPartial<ContractCodeHistoryEntry>): ContractCodeHistoryEntry; +}; +export declare const AbsoluteTxPosition: { + encode(message: AbsoluteTxPosition, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): AbsoluteTxPosition; + fromPartial(object: DeepPartial<AbsoluteTxPosition>): AbsoluteTxPosition; +}; +export declare const Model: { + encode(message: Model, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Model; + fromPartial(object: DeepPartial<Model>): Model; +}; diff --git a/packages/codegen/dist/gogoproto/bundle.d.ts b/packages/codegen/dist/gogoproto/bundle.d.ts new file mode 100644 index 00000000..55791e68 --- /dev/null +++ b/packages/codegen/dist/gogoproto/bundle.d.ts @@ -0,0 +1 @@ +export declare const gogoproto: {}; diff --git a/packages/codegen/dist/gogoproto/gogo.d.ts b/packages/codegen/dist/gogoproto/gogo.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/packages/codegen/dist/gogoproto/gogo.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/codegen/dist/google/api/annotations.d.ts b/packages/codegen/dist/google/api/annotations.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/packages/codegen/dist/google/api/annotations.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/codegen/dist/google/api/http.d.ts b/packages/codegen/dist/google/api/http.d.ts new file mode 100644 index
00000000..7c8bfc6c --- /dev/null +++ b/packages/codegen/dist/google/api/http.d.ts @@ -0,0 +1,666 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface HttpSDKType { + rules: HttpRuleSDKType[]; + fully_decode_reserved_expansion: boolean; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. 
+ * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A&param=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option.
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. 
The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: string; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: string; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: string; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: string; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: string; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: CustomHttpPattern; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. 
+ * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. 
+ * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A&param=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2.
If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). 
+ * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRuleSDKType { + selector: string; + get?: string; + put?: string; + post?: string; + delete?: string; + patch?: string; + custom?: CustomHttpPatternSDKType; + body: string; + response_body: string; + additional_bindings: HttpRuleSDKType[]; +} +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} +/** A custom pattern is used for defining custom HTTP verb. 
*/ +export interface CustomHttpPatternSDKType { + kind: string; + path: string; +} +export declare const Http: { + encode(message: Http, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Http; + fromPartial(object: DeepPartial<Http>): Http; +}; +export declare const HttpRule: { + encode(message: HttpRule, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule; + fromPartial(object: DeepPartial<HttpRule>): HttpRule; +}; +export declare const CustomHttpPattern: { + encode(message: CustomHttpPattern, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern; + fromPartial(object: DeepPartial<CustomHttpPattern>): CustomHttpPattern; +}; diff --git a/packages/codegen/dist/google/bundle.d.ts b/packages/codegen/dist/google/bundle.d.ts new file mode 100644 index 00000000..a38c0e4e --- /dev/null +++ b/packages/codegen/dist/google/bundle.d.ts @@ -0,0 +1,1466 @@ +/// +import * as _104 from "./api/http"; +import * as _105 from "./protobuf/any"; +import * as _106 from "./protobuf/descriptor"; +import * as _107 from "./protobuf/duration"; +import * as _108 from "./protobuf/empty"; +import * as _109 from "./protobuf/timestamp"; +export declare namespace google { + const api: { + Http: { + encode(message: _104.Http, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _104.Http; + fromPartial(object: { + rules?: { + selector?: string; + get?: string; + put?: string; + post?: string; + delete?: string; + patch?: string; + custom?: { + kind?: string; + path?: string; + }; + body?: string; + responseBody?: string; + additionalBindings?: any[]; + }[]; + fullyDecodeReservedExpansion?: boolean; + }): _104.Http; + }; + HttpRule: { + encode(message: _104.HttpRule, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _104.HttpRule; + fromPartial(object: { + selector?: string; + get?: string; + put?: string; + post?: string; + delete?: string; + patch?: string; + custom?: { + kind?: string; + path?: string; + }; + body?: string; + responseBody?: string; + additionalBindings?: any[]; + }): _104.HttpRule; + }; + CustomHttpPattern: { + encode(message: _104.CustomHttpPattern, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _104.CustomHttpPattern; + fromPartial(object: { + kind?: string; + path?: string; + }): _104.CustomHttpPattern; + }; + }; + const protobuf: { + Timestamp: { + encode(message: _109.Timestamp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _109.Timestamp; + fromPartial(object: { + seconds?: string | number | import("long").Long; + nanos?: number; + }): _109.Timestamp; + }; + Empty: { + encode(_: _108.Empty, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _108.Empty; + fromPartial(_: {}): _108.Empty; + }; + Duration: { + encode(message: _107.Duration, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _107.Duration; + fromPartial(object: { + seconds?: string | number | import("long").Long; + nanos?: number; + }): _107.Duration; + }; +
fieldDescriptorProto_TypeFromJSON(object: any): _106.FieldDescriptorProto_Type; + fieldDescriptorProto_TypeToJSON(object: _106.FieldDescriptorProto_Type): string; + fieldDescriptorProto_LabelFromJSON(object: any): _106.FieldDescriptorProto_Label; + fieldDescriptorProto_LabelToJSON(object: _106.FieldDescriptorProto_Label): string; + fileOptions_OptimizeModeFromJSON(object: any): _106.FileOptions_OptimizeMode; + fileOptions_OptimizeModeToJSON(object: _106.FileOptions_OptimizeMode): string; + fieldOptions_CTypeFromJSON(object: any): _106.FieldOptions_CType; + fieldOptions_CTypeToJSON(object: _106.FieldOptions_CType): string; + fieldOptions_JSTypeFromJSON(object: any): _106.FieldOptions_JSType; + fieldOptions_JSTypeToJSON(object: _106.FieldOptions_JSType): string; + methodOptions_IdempotencyLevelFromJSON(object: any): _106.MethodOptions_IdempotencyLevel; + methodOptions_IdempotencyLevelToJSON(object: _106.MethodOptions_IdempotencyLevel): string; + FieldDescriptorProto_Type: typeof _106.FieldDescriptorProto_Type; + FieldDescriptorProto_TypeSDKType: typeof _106.FieldDescriptorProto_Type; + FieldDescriptorProto_Label: typeof _106.FieldDescriptorProto_Label; + FieldDescriptorProto_LabelSDKType: typeof _106.FieldDescriptorProto_Label; + FileOptions_OptimizeMode: typeof _106.FileOptions_OptimizeMode; + FileOptions_OptimizeModeSDKType: typeof _106.FileOptions_OptimizeMode; + FieldOptions_CType: typeof _106.FieldOptions_CType; + FieldOptions_CTypeSDKType: typeof _106.FieldOptions_CType; + FieldOptions_JSType: typeof _106.FieldOptions_JSType; + FieldOptions_JSTypeSDKType: typeof _106.FieldOptions_JSType; + MethodOptions_IdempotencyLevel: typeof _106.MethodOptions_IdempotencyLevel; + MethodOptions_IdempotencyLevelSDKType: typeof _106.MethodOptions_IdempotencyLevel; + FileDescriptorSet: { + encode(message: _106.FileDescriptorSet, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.FileDescriptorSet; + fromPartial(object: { + file?: { + name?: string; + package?: string; + dependency?: string[]; + publicDependency?: number[]; + weakDependency?: number[]; + messageType?: { + name?: string; + field?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + extension?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + 
negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + nestedType?: any[]; + enumType?: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + extensionRange?: { + start?: number; + end?: number; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + oneofDecl?: { + name?: string; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + messageSetWireFormat?: boolean; + noStandardDescriptorAccessor?: boolean; + deprecated?: boolean; + mapEntry?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + enumType?: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + service?: { + name?: string; + method?: { + name?: string; + inputType?: string; + outputType?: string; + options?: { + deprecated?: boolean; + 
idempotencyLevel?: _106.MethodOptions_IdempotencyLevel; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + clientStreaming?: boolean; + serverStreaming?: boolean; + }[]; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + extension?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + javaPackage?: string; + javaOuterClassname?: string; + javaMultipleFiles?: boolean; + javaGenerateEqualsAndHash?: boolean; + javaStringCheckUtf8?: boolean; + optimizeFor?: _106.FileOptions_OptimizeMode; + goPackage?: string; + ccGenericServices?: boolean; + javaGenericServices?: boolean; + pyGenericServices?: boolean; + phpGenericServices?: boolean; + deprecated?: boolean; + ccEnableArenas?: boolean; + objcClassPrefix?: string; + csharpNamespace?: string; + swiftPrefix?: string; + phpClassPrefix?: string; + phpNamespace?: string; + phpMetadataNamespace?: string; + rubyPackage?: string; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + sourceCodeInfo?: { + location?: { + path?: number[]; + span?: number[]; + leadingComments?: string; + trailingComments?: string; + leadingDetachedComments?: string[]; + }[]; + }; + syntax?: string; + }[]; + }): _106.FileDescriptorSet; + }; + FileDescriptorProto: { + encode(message: _106.FileDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.FileDescriptorProto; + fromPartial(object: { + name?: string; + package?: string; + dependency?: string[]; + publicDependency?: number[]; + weakDependency?: number[]; + messageType?: { + name?: string; + field?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: 
boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + extension?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + nestedType?: any[]; + enumType?: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + extensionRange?: { + start?: number; + end?: number; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + oneofDecl?: { + name?: string; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + messageSetWireFormat?: boolean; + noStandardDescriptorAccessor?: boolean; + deprecated?: boolean; + mapEntry?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + enumType?: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + 
uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + service?: { + name?: string; + method?: { + name?: string; + inputType?: string; + outputType?: string; + options?: { + deprecated?: boolean; + idempotencyLevel?: _106.MethodOptions_IdempotencyLevel; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + clientStreaming?: boolean; + serverStreaming?: boolean; + }[]; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + extension?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + javaPackage?: string; + javaOuterClassname?: string; + javaMultipleFiles?: boolean; + javaGenerateEqualsAndHash?: boolean; + javaStringCheckUtf8?: boolean; + optimizeFor?: _106.FileOptions_OptimizeMode; + goPackage?: string; + ccGenericServices?: boolean; + javaGenericServices?: boolean; + pyGenericServices?: boolean; + phpGenericServices?: boolean; + deprecated?: boolean; + ccEnableArenas?: boolean; + objcClassPrefix?: string; + csharpNamespace?: string; + swiftPrefix?: string; + phpClassPrefix?: string; + phpNamespace?: string; + phpMetadataNamespace?: string; + rubyPackage?: string; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + sourceCodeInfo?: { + location?: { + path?: number[]; + span?: number[]; + 
leadingComments?: string; + trailingComments?: string; + leadingDetachedComments?: string[]; + }[]; + }; + syntax?: string; + }): _106.FileDescriptorProto; + }; + DescriptorProto: { + encode(message: _106.DescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.DescriptorProto; + fromPartial(object: { + name?: string; + field?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + extension?: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + nestedType?: any[]; + enumType?: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }[]; + extensionRange?: { + start?: number; + end?: number; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + oneofDecl?: { + name?: string; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | 
import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + messageSetWireFormat?: boolean; + noStandardDescriptorAccessor?: boolean; + deprecated?: boolean; + mapEntry?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }): _106.DescriptorProto; + }; + DescriptorProto_ExtensionRange: { + encode(message: _106.DescriptorProto_ExtensionRange, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.DescriptorProto_ExtensionRange; + fromPartial(object: { + start?: number; + end?: number; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }): _106.DescriptorProto_ExtensionRange; + }; + DescriptorProto_ReservedRange: { + encode(message: _106.DescriptorProto_ReservedRange, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.DescriptorProto_ReservedRange; + fromPartial(object: { + start?: number; + end?: number; + }): _106.DescriptorProto_ReservedRange; + }; + ExtensionRangeOptions: { + encode(message: _106.ExtensionRangeOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.ExtensionRangeOptions; + fromPartial(object: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.ExtensionRangeOptions; + }; + FieldDescriptorProto: { + encode(message: _106.FieldDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.FieldDescriptorProto; + fromPartial(object: { + name?: string; + number?: number; + label?: _106.FieldDescriptorProto_Label; + type?: _106.FieldDescriptorProto_Type; + typeName?: string; + extendee?: string; + defaultValue?: string; + oneofIndex?: number; + jsonName?: string; + options?: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }): _106.FieldDescriptorProto; + }; + OneofDescriptorProto: { + encode(message: _106.OneofDescriptorProto, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.OneofDescriptorProto; + fromPartial(object: { + name?: string; + options?: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }): _106.OneofDescriptorProto; + }; + EnumDescriptorProto: { + encode(message: _106.EnumDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.EnumDescriptorProto; + fromPartial(object: { + name?: string; + value?: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }[]; + options?: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + reservedRange?: { + start?: number; + end?: number; + }[]; + reservedName?: string[]; + }): _106.EnumDescriptorProto; + }; + EnumDescriptorProto_EnumReservedRange: { + encode(message: _106.EnumDescriptorProto_EnumReservedRange, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.EnumDescriptorProto_EnumReservedRange; + fromPartial(object: { + start?: number; + end?: number; + }): _106.EnumDescriptorProto_EnumReservedRange; + }; + EnumValueDescriptorProto: { + encode(message: _106.EnumValueDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.EnumValueDescriptorProto; + fromPartial(object: { + name?: string; + number?: number; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }): _106.EnumValueDescriptorProto; + }; + ServiceDescriptorProto: { + encode(message: _106.ServiceDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.ServiceDescriptorProto; + fromPartial(object: { + name?: string; + method?: { + name?: string; + inputType?: string; + outputType?: string; + options?: { + deprecated?: boolean; + idempotencyLevel?: _106.MethodOptions_IdempotencyLevel; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | 
import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + clientStreaming?: boolean; + serverStreaming?: boolean; + }[]; + options?: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + }): _106.ServiceDescriptorProto; + }; + MethodDescriptorProto: { + encode(message: _106.MethodDescriptorProto, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.MethodDescriptorProto; + fromPartial(object: { + name?: string; + inputType?: string; + outputType?: string; + options?: { + deprecated?: boolean; + idempotencyLevel?: _106.MethodOptions_IdempotencyLevel; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }; + clientStreaming?: boolean; + serverStreaming?: boolean; + }): _106.MethodDescriptorProto; + }; + FileOptions: { + encode(message: _106.FileOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.FileOptions; + fromPartial(object: { + javaPackage?: string; + javaOuterClassname?: string; + javaMultipleFiles?: boolean; + javaGenerateEqualsAndHash?: boolean; + javaStringCheckUtf8?: boolean; + optimizeFor?: _106.FileOptions_OptimizeMode; + goPackage?: string; + ccGenericServices?: boolean; + javaGenericServices?: boolean; + pyGenericServices?: boolean; + phpGenericServices?: boolean; + deprecated?: boolean; + ccEnableArenas?: boolean; + objcClassPrefix?: string; + csharpNamespace?: string; + swiftPrefix?: string; + phpClassPrefix?: string; + phpNamespace?: string; + phpMetadataNamespace?: string; + rubyPackage?: string; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.FileOptions; + }; + MessageOptions: { + encode(message: _106.MessageOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.MessageOptions; + fromPartial(object: { + messageSetWireFormat?: boolean; + noStandardDescriptorAccessor?: boolean; + deprecated?: boolean; + mapEntry?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.MessageOptions; + }; + FieldOptions: { + encode(message: _106.FieldOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: 
Uint8Array | import("protobufjs").Reader, length?: number): _106.FieldOptions; + fromPartial(object: { + ctype?: _106.FieldOptions_CType; + packed?: boolean; + jstype?: _106.FieldOptions_JSType; + lazy?: boolean; + deprecated?: boolean; + weak?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.FieldOptions; + }; + OneofOptions: { + encode(message: _106.OneofOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.OneofOptions; + fromPartial(object: { + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.OneofOptions; + }; + EnumOptions: { + encode(message: _106.EnumOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.EnumOptions; + fromPartial(object: { + allowAlias?: boolean; + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.EnumOptions; + }; + EnumValueOptions: { + encode(message: _106.EnumValueOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.EnumValueOptions; + fromPartial(object: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.EnumValueOptions; + }; + ServiceOptions: { + encode(message: _106.ServiceOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.ServiceOptions; + fromPartial(object: { + deprecated?: boolean; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.ServiceOptions; + }; + MethodOptions: { + encode(message: _106.MethodOptions, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.MethodOptions; + fromPartial(object: { + deprecated?: boolean; + idempotencyLevel?: _106.MethodOptions_IdempotencyLevel; + uninterpretedOption?: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; 
+ positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }[]; + }): _106.MethodOptions; + }; + UninterpretedOption: { + encode(message: _106.UninterpretedOption, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.UninterpretedOption; + fromPartial(object: { + name?: { + namePart?: string; + isExtension?: boolean; + }[]; + identifierValue?: string; + positiveIntValue?: string | number | import("long").Long; + negativeIntValue?: string | number | import("long").Long; + doubleValue?: number; + stringValue?: Uint8Array; + aggregateValue?: string; + }): _106.UninterpretedOption; + }; + UninterpretedOption_NamePart: { + encode(message: _106.UninterpretedOption_NamePart, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.UninterpretedOption_NamePart; + fromPartial(object: { + namePart?: string; + isExtension?: boolean; + }): _106.UninterpretedOption_NamePart; + }; + SourceCodeInfo: { + encode(message: _106.SourceCodeInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.SourceCodeInfo; + fromPartial(object: { + location?: { + path?: number[]; + span?: number[]; + leadingComments?: string; + trailingComments?: string; + leadingDetachedComments?: string[]; + }[]; + }): _106.SourceCodeInfo; + }; + SourceCodeInfo_Location: { + encode(message: _106.SourceCodeInfo_Location, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.SourceCodeInfo_Location; + fromPartial(object: { + path?: number[]; + span?: number[]; + leadingComments?: string; + trailingComments?: string; + leadingDetachedComments?: string[]; + }): _106.SourceCodeInfo_Location; + }; + GeneratedCodeInfo: { + encode(message: _106.GeneratedCodeInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.GeneratedCodeInfo; + fromPartial(object: { + annotation?: { + path?: number[]; + sourceFile?: string; + begin?: number; + end?: number; + }[]; + }): _106.GeneratedCodeInfo; + }; + GeneratedCodeInfo_Annotation: { + encode(message: _106.GeneratedCodeInfo_Annotation, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _106.GeneratedCodeInfo_Annotation; + fromPartial(object: { + path?: number[]; + sourceFile?: string; + begin?: number; + end?: number; + }): _106.GeneratedCodeInfo_Annotation; + }; + Any: { + encode(message: _105.Any, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _105.Any; + fromPartial(object: { + typeUrl?: string; + value?: Uint8Array; + }): _105.Any; + }; + }; +} diff --git a/packages/codegen/dist/google/protobuf/any.d.ts b/packages/codegen/dist/google/protobuf/any.d.ts new file mode 100644 index 00000000..122b1c66 --- /dev/null +++ b/packages/codegen/dist/google/protobuf/any.d.ts @@ -0,0 +1,207 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * `Any` contains an arbitrary 
serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. 
+ * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface AnySDKType { + type_url: string; + value: Uint8Array; +} +export declare const Any: { + encode(message: Any, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Any; + fromPartial(object: DeepPartial): Any; +}; diff --git a/packages/codegen/dist/google/protobuf/descriptor.d.ts b/packages/codegen/dist/google/protobuf/descriptor.d.ts new file mode 100644 index 00000000..32ae6277 --- /dev/null +++ b/packages/codegen/dist/google/protobuf/descriptor.d.ts @@ -0,0 +1,1172 @@ +/// +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +export declare enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. 
+ */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1 +} +export declare const FieldDescriptorProto_TypeSDKType: typeof FieldDescriptorProto_Type; +export declare function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type; +export declare function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string; +export declare enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1 +} +export declare const FieldDescriptorProto_LabelSDKType: typeof FieldDescriptorProto_Label; +export declare function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label; +export declare function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string; +/** Generated classes can be optimized for speed or code size. */ +export declare enum FileOptions_OptimizeMode { + /** + * SPEED - Generate complete code for parsing, serialization, + * etc. + */ + SPEED = 1, + /** CODE_SIZE - Use ReflectionOps to implement these methods. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1 +} +export declare const FileOptions_OptimizeModeSDKType: typeof FileOptions_OptimizeMode; +export declare function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode; +export declare function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string; +export declare enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1 +} +export declare const FieldOptions_CTypeSDKType: typeof FieldOptions_CType; +export declare function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType; +export declare function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string; +export declare enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1 +} +export declare const FieldOptions_JSTypeSDKType: typeof FieldOptions_JSType; +export declare function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType; +export declare function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string; +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export declare enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1 +} +export declare const MethodOptions_IdempotencyLevelSDKType: typeof MethodOptions_IdempotencyLevel; +export declare function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel; +export declare function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string; +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSetSDKType { + file: FileDescriptorProtoSDKType[]; +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: FileOptions; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: SourceCodeInfo; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProtoSDKType { + name: string; + package: string; + dependency: string[]; + public_dependency: number[]; + weak_dependency: number[]; + message_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + service: ServiceDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + options?: FileOptionsSDKType; + source_code_info?: SourceCodeInfoSDKType; + syntax: string; +} +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} +/** Describes a message type. 
*/ +export interface DescriptorProtoSDKType { + name: string; + field: FieldDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + nested_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + extension_range: DescriptorProto_ExtensionRangeSDKType[]; + oneof_decl: OneofDescriptorProtoSDKType[]; + options?: MessageOptionsSDKType; + reserved_range: DescriptorProto_ReservedRangeSDKType[]; + reserved_name: string[]; +} +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options?: ExtensionRangeOptions; +} +export interface DescriptorProto_ExtensionRangeSDKType { + start: number; + end: number; + options?: ExtensionRangeOptionsSDKType; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRangeSDKType { + start: number; + end: number; +} +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface ExtensionRangeOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options?: FieldOptions; +} +/** Describes a field within a message. 
*/ +export interface FieldDescriptorProtoSDKType { + name: string; + number: number; + label: FieldDescriptorProto_Label; + type: FieldDescriptorProto_Type; + type_name: string; + extendee: string; + default_value: string; + oneof_index: number; + json_name: string; + options?: FieldOptionsSDKType; +} +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options?: OneofOptions; +} +/** Describes a oneof. */ +export interface OneofDescriptorProtoSDKType { + name: string; + options?: OneofOptionsSDKType; +} +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options?: EnumOptions; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} +/** Describes an enum type. */ +export interface EnumDescriptorProtoSDKType { + name: string; + value: EnumValueDescriptorProtoSDKType[]; + options?: EnumOptionsSDKType; + reserved_range: EnumDescriptorProto_EnumReservedRangeSDKType[]; + reserved_name: string[]; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRangeSDKType { + start: number; + end: number; +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options?: EnumValueOptions; +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProtoSDKType { + name: string; + number: number; + options?: EnumValueOptionsSDKType; +} +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options?: ServiceOptions; +} +/** Describes a service. */ +export interface ServiceDescriptorProtoSDKType { + name: string; + method: MethodDescriptorProtoSDKType[]; + options?: ServiceOptionsSDKType; +} +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options?: MethodOptions; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} +/** Describes a method of a service. 
*/ +export interface MethodDescriptorProtoSDKType { + name: string; + input_type: string; + output_type: string; + options?: MethodOptionsSDKType; + client_streaming: boolean; + server_streaming: boolean; +} +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). + */ + javaOuterClassname: string; + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** This option does nothing. */ + /** @deprecated */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. 
There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} +export interface FileOptionsSDKType { + java_package: string; + java_outer_classname: string; + java_multiple_files: boolean; + /** @deprecated */ + java_generate_equals_and_hash: boolean; + java_string_check_utf8: boolean; + optimize_for: FileOptions_OptimizeMode; + go_package: string; + cc_generic_services: boolean; + java_generic_services: boolean; + py_generic_services: boolean; + php_generic_services: boolean; + deprecated: boolean; + cc_enable_arenas: boolean; + objc_class_prefix: string; + csharp_namespace: string; + swift_prefix: string; + php_class_prefix: string; + php_namespace: string; + php_metadata_namespace: string; + ruby_package: string; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". + */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. 
+ */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface MessageOptionsSDKType { + message_set_wire_format: boolean; + no_standard_descriptor_accessor: boolean; + deprecated: boolean; + map_entry: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. 
Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface FieldOptionsSDKType { + ctype: FieldOptions_CType; + packed: boolean; + jstype: FieldOptions_JSType; + lazy: boolean; + deprecated: boolean; + weak: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface OneofOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumOptionsSDKType { + allow_alias: boolean; + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumValueOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} +export interface ServiceOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface MethodOptionsSDKType { + deprecated: boolean; + idempotency_level: MethodOptions_IdempotencyLevel; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue: string; + positiveIntValue: Long; + negativeIntValue: Long; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOptionSDKType { + name: UninterpretedOption_NamePartSDKType[]; + identifier_value: string; + positive_int_value: Long; + negative_int_value: Long; + double_value: number; + string_value: Uint8Array; + aggregate_value: string; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePartSDKType { + name_part: string; + is_extension: boolean; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. 
This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfoSDKType { + location: SourceCodeInfo_LocationSDKType[]; +} +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. 
Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} +export interface SourceCodeInfo_LocationSDKType { + path: number[]; + span: number[]; + leading_comments: string; + trailing_comments: string; + leading_detached_comments: string[]; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfoSDKType { + annotation: GeneratedCodeInfo_AnnotationSDKType[]; +} +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). 
+ */ + end: number; +} +export interface GeneratedCodeInfo_AnnotationSDKType { + path: number[]; + source_file: string; + begin: number; + end: number; +} +export declare const FileDescriptorSet: { + encode(message: FileDescriptorSet, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet; + fromPartial(object: DeepPartial): FileDescriptorSet; +}; +export declare const FileDescriptorProto: { + encode(message: FileDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto; + fromPartial(object: DeepPartial): FileDescriptorProto; +}; +export declare const DescriptorProto: { + encode(message: DescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto; + fromPartial(object: DeepPartial): DescriptorProto; +}; +export declare const DescriptorProto_ExtensionRange: { + encode(message: DescriptorProto_ExtensionRange, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange; + fromPartial(object: DeepPartial): DescriptorProto_ExtensionRange; +}; +export declare const DescriptorProto_ReservedRange: { + encode(message: DescriptorProto_ReservedRange, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange; + fromPartial(object: DeepPartial): DescriptorProto_ReservedRange; +}; +export declare const ExtensionRangeOptions: { + encode(message: ExtensionRangeOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions; + fromPartial(object: DeepPartial): ExtensionRangeOptions; +}; +export declare const FieldDescriptorProto: { + encode(message: FieldDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto; + fromPartial(object: DeepPartial): FieldDescriptorProto; +}; +export declare const OneofDescriptorProto: { + encode(message: OneofDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto; + fromPartial(object: DeepPartial): OneofDescriptorProto; +}; +export declare const EnumDescriptorProto: { + encode(message: EnumDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto; + fromPartial(object: DeepPartial): EnumDescriptorProto; +}; +export declare const EnumDescriptorProto_EnumReservedRange: { + encode(message: EnumDescriptorProto_EnumReservedRange, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange; + fromPartial(object: DeepPartial): EnumDescriptorProto_EnumReservedRange; +}; +export declare const EnumValueDescriptorProto: { + encode(message: EnumValueDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto; + fromPartial(object: DeepPartial): EnumValueDescriptorProto; +}; +export declare const ServiceDescriptorProto: { + encode(message: ServiceDescriptorProto, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto; + fromPartial(object: DeepPartial): ServiceDescriptorProto; +}; +export declare const MethodDescriptorProto: { + encode(message: MethodDescriptorProto, writer?: _m0.Writer): 
_m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto; + fromPartial(object: DeepPartial): MethodDescriptorProto; +}; +export declare const FileOptions: { + encode(message: FileOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions; + fromPartial(object: DeepPartial): FileOptions; +}; +export declare const MessageOptions: { + encode(message: MessageOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions; + fromPartial(object: DeepPartial): MessageOptions; +}; +export declare const FieldOptions: { + encode(message: FieldOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions; + fromPartial(object: DeepPartial): FieldOptions; +}; +export declare const OneofOptions: { + encode(message: OneofOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions; + fromPartial(object: DeepPartial): OneofOptions; +}; +export declare const EnumOptions: { + encode(message: EnumOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions; + fromPartial(object: DeepPartial): EnumOptions; +}; +export declare const EnumValueOptions: { + encode(message: EnumValueOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions; + fromPartial(object: DeepPartial): EnumValueOptions; +}; +export declare const ServiceOptions: { + encode(message: ServiceOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions; + fromPartial(object: DeepPartial): ServiceOptions; +}; +export declare const MethodOptions: { + encode(message: MethodOptions, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions; + fromPartial(object: DeepPartial): MethodOptions; +}; +export declare const UninterpretedOption: { + encode(message: UninterpretedOption, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption; + fromPartial(object: DeepPartial): UninterpretedOption; +}; +export declare const UninterpretedOption_NamePart: { + encode(message: UninterpretedOption_NamePart, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart; + fromPartial(object: DeepPartial): UninterpretedOption_NamePart; +}; +export declare const SourceCodeInfo: { + encode(message: SourceCodeInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo; + fromPartial(object: DeepPartial): SourceCodeInfo; +}; +export declare const SourceCodeInfo_Location: { + encode(message: SourceCodeInfo_Location, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location; + fromPartial(object: DeepPartial): SourceCodeInfo_Location; +}; +export declare const GeneratedCodeInfo: { + encode(message: GeneratedCodeInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo; + fromPartial(object: DeepPartial): GeneratedCodeInfo; +}; +export declare const GeneratedCodeInfo_Annotation: { + encode(message: GeneratedCodeInfo_Annotation, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): 
GeneratedCodeInfo_Annotation; + fromPartial(object: DeepPartial): GeneratedCodeInfo_Annotation; +}; diff --git a/packages/codegen/dist/google/protobuf/duration.d.ts b/packages/codegen/dist/google/protobuf/duration.d.ts new file mode 100644 index 00000000..7b970f92 --- /dev/null +++ b/packages/codegen/dist/google/protobuf/duration.d.ts @@ -0,0 +1,149 @@ +/// +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: Long; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + nanos: number; +} +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". 
It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface DurationSDKType { + seconds: Long; + nanos: number; +} +export declare const Duration: { + encode(message: Duration, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Duration; + fromPartial(object: DeepPartial): Duration; +}; diff --git a/packages/codegen/dist/google/protobuf/empty.d.ts b/packages/codegen/dist/google/protobuf/empty.d.ts new file mode 100644 index 00000000..91c16741 --- /dev/null +++ b/packages/codegen/dist/google/protobuf/empty.d.ts @@ -0,0 +1,33 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * + * The JSON representation for `Empty` is empty JSON object `{}`. + */ +export interface Empty { +} +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * + * The JSON representation for `Empty` is empty JSON object `{}`. 
+ */ +export interface EmptySDKType { +} +export declare const Empty: { + encode(_: Empty, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Empty; + fromPartial(_: DeepPartial): Empty; +}; diff --git a/packages/codegen/dist/google/protobuf/timestamp.d.ts b/packages/codegen/dist/google/protobuf/timestamp.d.ts new file mode 100644 index 00000000..523728e1 --- /dev/null +++ b/packages/codegen/dist/google/protobuf/timestamp.d.ts @@ -0,0 +1,195 @@ +/// +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). 
+ * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: Long; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number; +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. 
+ * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface TimestampSDKType { + seconds: Long; + nanos: number; +} +export declare const Timestamp: { + encode(message: Timestamp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp; + fromPartial(object: DeepPartial): Timestamp; +}; diff --git a/packages/codegen/dist/helpers.d.ts b/packages/codegen/dist/helpers.d.ts new file mode 100644 index 00000000..02a5732c --- /dev/null +++ b/packages/codegen/dist/helpers.d.ts @@ -0,0 +1,82 @@ +/** +* This file and any referenced files were automatically generated by @osmonauts/telescope@0.88.2 +* DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain +* and run the transpile command or yarn proto command to regenerate this bundle. +*/ +import Long from 'long'; +export { Long }; +export declare function bytesFromBase64(b64: string): Uint8Array; +export declare function base64FromBytes(arr: Uint8Array): string; +export interface AminoHeight { + readonly revision_number?: string; + readonly revision_height?: string; +} +export declare function omitDefault(input: T): T | undefined; +interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: Long; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. 
+ */ + nanos: number; +} +export declare function toDuration(duration: string): Duration; +export declare function fromDuration(duration: Duration): string; +export declare function isSet(value: any): boolean; +export declare function isObject(value: any): boolean; +export interface PageRequest { + key: Uint8Array; + offset: Long; + limit: Long; + countTotal: boolean; + reverse: boolean; +} +export interface PageRequestParams { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; +} +export interface Params { + params: PageRequestParams; +} +export declare const setPaginationParams: (options: Params, pagination?: PageRequest) => Params; +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; +export type DeepPartial = T extends Builtin ? T : T extends Long ? string | number | Long : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends {} ? { + [K in keyof T]?: DeepPartial; +} : Partial; +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P : P & { + [K in keyof P]: Exact; +} & Record>, never>; +export interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} +interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: Long; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number; +} +export declare function toTimestamp(date: Date): Timestamp; +export declare function fromTimestamp(t: Timestamp): Date; +export declare function fromJsonTimestamp(o: any): Timestamp; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/genesis.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/genesis.d.ts new file mode 100644 index 00000000..4bbee85f --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/genesis.d.ts @@ -0,0 +1,20 @@ +import { DenomTrace, DenomTraceSDKType, Params, ParamsSDKType } from "./transfer"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** GenesisState defines the ibc-transfer genesis state */ +export interface GenesisState { + portId: string; + denomTraces: DenomTrace[]; + params?: Params; +} +/** GenesisState defines the ibc-transfer genesis state */ +export interface GenesisStateSDKType { + port_id: string; + denom_traces: DenomTraceSDKType[]; + params?: ParamsSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/query.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/query.d.ts new file mode 100644 index 00000000..9657b84a --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/query.d.ts @@ -0,0 +1,112 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { DenomTrace, DenomTraceSDKType, Params, ParamsSDKType } from "./transfer"; +import * as _m0 from "protobufjs/minimal"; 
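Illustrative sketch of how the helpers and codecs declared above might be consumed; a minimal example assuming imports are resolved relative to the generated dist root shown in this diff (not part of the generated output itself):

import { Long, toTimestamp, fromTimestamp } from "./helpers";
import { Duration } from "./google/protobuf/duration";

// Date <-> protobuf Timestamp round trip via the generated helpers.
const roundTripped: Date = fromTimestamp(toTimestamp(new Date()));

// fromPartial fills in defaults for omitted fields (DeepPartial input);
// encode() returns a protobufjs Writer whose finish() yields the wire bytes.
const threeSeconds = Duration.fromPartial({ seconds: Long.fromNumber(3), nanos: 0 });
const bytes: Uint8Array = Duration.encode(threeSeconds).finish();
const decoded = Duration.decode(bytes);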
+import { DeepPartial } from "../../../../helpers"; +/** + * QueryDenomTraceRequest is the request type for the Query/DenomTrace RPC + * method + */ +export interface QueryDenomTraceRequest { + /** hash (in hex format) of the denomination trace information. */ + hash: string; +} +/** + * QueryDenomTraceRequest is the request type for the Query/DenomTrace RPC + * method + */ +export interface QueryDenomTraceRequestSDKType { + hash: string; +} +/** + * QueryDenomTraceResponse is the response type for the Query/DenomTrace RPC + * method. + */ +export interface QueryDenomTraceResponse { + /** denom_trace returns the requested denomination trace information. */ + denomTrace?: DenomTrace; +} +/** + * QueryDenomTraceResponse is the response type for the Query/DenomTrace RPC + * method. + */ +export interface QueryDenomTraceResponseSDKType { + denom_trace?: DenomTraceSDKType; +} +/** + * QueryConnectionsRequest is the request type for the Query/DenomTraces RPC + * method + */ +export interface QueryDenomTracesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryConnectionsRequest is the request type for the Query/DenomTraces RPC + * method + */ +export interface QueryDenomTracesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionsResponse is the response type for the Query/DenomTraces RPC + * method. + */ +export interface QueryDenomTracesResponse { + /** denom_traces returns all denominations trace information. */ + denomTraces: DenomTrace[]; + /** pagination defines the pagination in the response. */ + pagination?: PageResponse; +} +/** + * QueryConnectionsResponse is the response type for the Query/DenomTraces RPC + * method. + */ +export interface QueryDenomTracesResponseSDKType { + denom_traces: DenomTraceSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType { +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. 
*/ +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +export declare const QueryDenomTraceRequest: { + encode(message: QueryDenomTraceRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTraceRequest; + fromPartial(object: DeepPartial): QueryDenomTraceRequest; +}; +export declare const QueryDenomTraceResponse: { + encode(message: QueryDenomTraceResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTraceResponse; + fromPartial(object: DeepPartial): QueryDenomTraceResponse; +}; +export declare const QueryDenomTracesRequest: { + encode(message: QueryDenomTracesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTracesRequest; + fromPartial(object: DeepPartial): QueryDenomTracesRequest; +}; +export declare const QueryDenomTracesResponse: { + encode(message: QueryDenomTracesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTracesResponse; + fromPartial(object: DeepPartial): QueryDenomTracesResponse; +}; +export declare const QueryParamsRequest: { + encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; + fromPartial(_: DeepPartial): QueryParamsRequest; +}; +export declare const QueryParamsResponse: { + encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; + fromPartial(object: DeepPartial): QueryParamsResponse; +}; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/query.lcd.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/query.lcd.d.ts new file mode 100644 index 00000000..41187bb2 --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/query.lcd.d.ts @@ -0,0 +1,11 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryDenomTraceRequest, QueryDenomTraceResponseSDKType, QueryDenomTracesRequest, QueryDenomTracesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + denomTrace(params: QueryDenomTraceRequest): Promise; + denomTraces(params?: QueryDenomTracesRequest): Promise; + params(_params?: QueryParamsRequest): Promise; +} diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/query.rpc.Query.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..d86b260e --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/query.rpc.Query.d.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryDenomTraceRequest, QueryDenomTraceResponse, QueryDenomTracesRequest, QueryDenomTracesResponse, QueryParamsRequest, QueryParamsResponse } from "./query"; +/** Query provides defines the gRPC querier service. */ +export interface Query { + /** DenomTrace queries a denomination trace information. */ + denomTrace(request: QueryDenomTraceRequest): Promise; + /** DenomTraces queries all denomination traces. */ + denomTraces(request?: QueryDenomTracesRequest): Promise; + /** Params queries all parameters of the ibc-transfer module. 
*/ + params(request?: QueryParamsRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + denomTrace(request: QueryDenomTraceRequest): Promise; + denomTraces(request?: QueryDenomTracesRequest): Promise; + params(request?: QueryParamsRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + denomTrace(request: QueryDenomTraceRequest): Promise; + denomTraces(request?: QueryDenomTracesRequest): Promise; + params(request?: QueryParamsRequest): Promise; +}; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/transfer.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/transfer.d.ts new file mode 100644 index 00000000..b756617b --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/transfer.d.ts @@ -0,0 +1,61 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * DenomTrace contains the base denomination for ICS20 fungible tokens and the + * source tracing information path. + */ +export interface DenomTrace { + /** + * path defines the chain of port/channel identifiers used for tracing the + * source of the fungible token. + */ + path: string; + /** base denomination of the relayed fungible token. */ + baseDenom: string; +} +/** + * DenomTrace contains the base denomination for ICS20 fungible tokens and the + * source tracing information path. + */ +export interface DenomTraceSDKType { + path: string; + base_denom: string; +} +/** + * Params defines the set of IBC transfer parameters. + * NOTE: To prevent a single token from being transferred, set the + * TransfersEnabled parameter to true and then set the bank module's SendEnabled + * parameter for the denomination to false. + */ +export interface Params { + /** + * send_enabled enables or disables all cross-chain token transfers from this + * chain. + */ + sendEnabled: boolean; + /** + * receive_enabled enables or disables all cross-chain token transfers to this + * chain. + */ + receiveEnabled: boolean; +} +/** + * Params defines the set of IBC transfer parameters. + * NOTE: To prevent a single token from being transferred, set the + * TransfersEnabled parameter to true and then set the bank module's SendEnabled + * parameter for the denomination to false. + */ +export interface ParamsSDKType { + send_enabled: boolean; + receive_enabled: boolean; +} +export declare const DenomTrace: { + encode(message: DenomTrace, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DenomTrace; + fromPartial(object: DeepPartial): DenomTrace; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/tx.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/tx.d.ts new file mode 100644 index 00000000..2b24603a --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/tx.d.ts @@ -0,0 +1,62 @@ +/// +import { Coin, CoinSDKType } from "../../../../cosmos/base/v1beta1/coin"; +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgTransfer defines a msg to transfer fungible tokens (i.e Coins) between + * ICS20 enabled chains. 
See ICS Spec here: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ +export interface MsgTransfer { + /** the port on which the packet will be sent */ + sourcePort: string; + /** the channel by which the packet will be sent */ + sourceChannel: string; + /** the tokens to be transferred */ + token?: Coin; + /** the sender address */ + sender: string; + /** the recipient address on the destination chain */ + receiver: string; + /** + * Timeout height relative to the current block height. + * The timeout is disabled when set to 0. + */ + timeoutHeight?: Height; + /** + * Timeout timestamp (in nanoseconds) relative to the current block timestamp. + * The timeout is disabled when set to 0. + */ + timeoutTimestamp: Long; +} +/** + * MsgTransfer defines a msg to transfer fungible tokens (i.e Coins) between + * ICS20 enabled chains. See ICS Spec here: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ +export interface MsgTransferSDKType { + source_port: string; + source_channel: string; + token?: CoinSDKType; + sender: string; + receiver: string; + timeout_height?: HeightSDKType; + timeout_timestamp: Long; +} +/** MsgTransferResponse defines the Msg/Transfer response type. */ +export interface MsgTransferResponse { +} +/** MsgTransferResponse defines the Msg/Transfer response type. */ +export interface MsgTransferResponseSDKType { +} +export declare const MsgTransfer: { + encode(message: MsgTransfer, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTransfer; + fromPartial(object: DeepPartial): MsgTransfer; +}; +export declare const MsgTransferResponse: { + encode(_: MsgTransferResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTransferResponse; + fromPartial(_: DeepPartial): MsgTransferResponse; +}; diff --git a/packages/codegen/dist/ibc/applications/transfer/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/ibc/applications/transfer/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..d000d7b5 --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v1/tx.rpc.msg.d.ts @@ -0,0 +1,12 @@ +import { Rpc } from "../../../../helpers"; +import { MsgTransfer, MsgTransferResponse } from "./tx"; +/** Msg defines the ibc/transfer Msg service. */ +export interface Msg { + /** Transfer defines a rpc handler method for MsgTransfer. 
*/ + transfer(request: MsgTransfer): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + transfer(request: MsgTransfer): Promise; +} diff --git a/packages/codegen/dist/ibc/applications/transfer/v2/packet.d.ts b/packages/codegen/dist/ibc/applications/transfer/v2/packet.d.ts new file mode 100644 index 00000000..fbd3d0e5 --- /dev/null +++ b/packages/codegen/dist/ibc/applications/transfer/v2/packet.d.ts @@ -0,0 +1,33 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * FungibleTokenPacketData defines a struct for the packet payload + * See FungibleTokenPacketData spec: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ +export interface FungibleTokenPacketData { + /** the token denomination to be transferred */ + denom: string; + /** the token amount to be transferred */ + amount: string; + /** the sender address */ + sender: string; + /** the recipient address on the destination chain */ + receiver: string; +} +/** + * FungibleTokenPacketData defines a struct for the packet payload + * See FungibleTokenPacketData spec: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ +export interface FungibleTokenPacketDataSDKType { + denom: string; + amount: string; + sender: string; + receiver: string; +} +export declare const FungibleTokenPacketData: { + encode(message: FungibleTokenPacketData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): FungibleTokenPacketData; + fromPartial(object: DeepPartial): FungibleTokenPacketData; +}; diff --git a/packages/codegen/dist/ibc/bundle.d.ts b/packages/codegen/dist/ibc/bundle.d.ts new file mode 100644 index 00000000..57b561c4 --- /dev/null +++ b/packages/codegen/dist/ibc/bundle.d.ts @@ -0,0 +1,3259 @@ +/// +import * as _110 from "./applications/transfer/v1/genesis"; +import * as _111 from "./applications/transfer/v1/query"; +import * as _112 from "./applications/transfer/v1/transfer"; +import * as _113 from "./applications/transfer/v1/tx"; +import * as _114 from "./applications/transfer/v2/packet"; +import * as _115 from "./core/channel/v1/channel"; +import * as _116 from "./core/channel/v1/genesis"; +import * as _117 from "./core/channel/v1/query"; +import * as _118 from "./core/channel/v1/tx"; +import * as _119 from "./core/client/v1/client"; +import * as _120 from "./core/client/v1/genesis"; +import * as _121 from "./core/client/v1/query"; +import * as _122 from "./core/client/v1/tx"; +import * as _123 from "./core/commitment/v1/commitment"; +import * as _124 from "./core/connection/v1/connection"; +import * as _125 from "./core/connection/v1/genesis"; +import * as _126 from "./core/connection/v1/query"; +import * as _127 from "./core/connection/v1/tx"; +import * as _128 from "./core/port/v1/query"; +import * as _129 from "./core/types/v1/genesis"; +import * as _130 from "./lightclients/localhost/v1/localhost"; +import * as _131 from "./lightclients/solomachine/v1/solomachine"; +import * as _132 from "./lightclients/solomachine/v2/solomachine"; +import * as _133 from "./lightclients/tendermint/v1/tendermint"; +import * as _197 from "./applications/transfer/v1/query.lcd"; +import * as _198 from "./core/channel/v1/query.lcd"; +import * as _199 from "./core/client/v1/query.lcd"; +import * as _200 from "./core/connection/v1/query.lcd"; +import * as _201 from 
"./applications/transfer/v1/query.rpc.Query"; +import * as _202 from "./core/channel/v1/query.rpc.Query"; +import * as _203 from "./core/client/v1/query.rpc.Query"; +import * as _204 from "./core/connection/v1/query.rpc.Query"; +import * as _205 from "./core/port/v1/query.rpc.Query"; +import * as _206 from "./applications/transfer/v1/tx.rpc.msg"; +import * as _207 from "./core/channel/v1/tx.rpc.msg"; +import * as _208 from "./core/client/v1/tx.rpc.msg"; +import * as _209 from "./core/connection/v1/tx.rpc.msg"; +export declare namespace ibc { + namespace applications { + namespace transfer { + const v1: { + MsgClientImpl: typeof _206.MsgClientImpl; + QueryClientImpl: typeof _201.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + denomTrace(request: _111.QueryDenomTraceRequest): Promise<_111.QueryDenomTraceResponse>; + denomTraces(request?: _111.QueryDenomTracesRequest): Promise<_111.QueryDenomTracesResponse>; + params(request?: _111.QueryParamsRequest): Promise<_111.QueryParamsResponse>; + }; + LCDQueryClient: typeof _197.LCDQueryClient; + MsgTransfer: { + encode(message: _113.MsgTransfer, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _113.MsgTransfer; + fromPartial(object: { + sourcePort?: string; + sourceChannel?: string; + token?: { + denom?: string; + amount?: string; + }; + sender?: string; + receiver?: string; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }): _113.MsgTransfer; + }; + MsgTransferResponse: { + encode(_: _113.MsgTransferResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _113.MsgTransferResponse; + fromPartial(_: {}): _113.MsgTransferResponse; + }; + DenomTrace: { + encode(message: _112.DenomTrace, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _112.DenomTrace; + fromPartial(object: { + path?: string; + baseDenom?: string; + }): _112.DenomTrace; + }; + Params: { + encode(message: _112.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _112.Params; + fromPartial(object: { + sendEnabled?: boolean; + receiveEnabled?: boolean; + }): _112.Params; + }; + QueryDenomTraceRequest: { + encode(message: _111.QueryDenomTraceRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _111.QueryDenomTraceRequest; + fromPartial(object: { + hash?: string; + }): _111.QueryDenomTraceRequest; + }; + QueryDenomTraceResponse: { + encode(message: _111.QueryDenomTraceResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _111.QueryDenomTraceResponse; + fromPartial(object: { + denomTrace?: { + path?: string; + baseDenom?: string; + }; + }): _111.QueryDenomTraceResponse; + }; + QueryDenomTracesRequest: { + encode(message: _111.QueryDenomTracesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_111.QueryDenomTracesRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _111.QueryDenomTracesRequest; + }; + QueryDenomTracesResponse: { + encode(message: _111.QueryDenomTracesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _111.QueryDenomTracesResponse; + fromPartial(object: { + denomTraces?: { + path?: string; + baseDenom?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _111.QueryDenomTracesResponse; + }; + QueryParamsRequest: { + encode(_: _111.QueryParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _111.QueryParamsRequest; + fromPartial(_: {}): _111.QueryParamsRequest; + }; + QueryParamsResponse: { + encode(message: _111.QueryParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _111.QueryParamsResponse; + fromPartial(object: { + params?: { + sendEnabled?: boolean; + receiveEnabled?: boolean; + }; + }): _111.QueryParamsResponse; + }; + GenesisState: { + encode(message: _110.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _110.GenesisState; + fromPartial(object: { + portId?: string; + denomTraces?: { + path?: string; + baseDenom?: string; + }[]; + params?: { + sendEnabled?: boolean; + receiveEnabled?: boolean; + }; + }): _110.GenesisState; + }; + }; + const v2: { + FungibleTokenPacketData: { + encode(message: _114.FungibleTokenPacketData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _114.FungibleTokenPacketData; + fromPartial(object: { + denom?: string; + amount?: string; + sender?: string; + receiver?: string; + }): _114.FungibleTokenPacketData; + }; + }; + } + } + namespace core { + namespace channel { + const v1: { + MsgClientImpl: typeof _207.MsgClientImpl; + QueryClientImpl: typeof _202.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + channel(request: _117.QueryChannelRequest): Promise<_117.QueryChannelResponse>; + channels(request?: _117.QueryChannelsRequest): Promise<_117.QueryChannelsResponse>; + connectionChannels(request: _117.QueryConnectionChannelsRequest): Promise<_117.QueryConnectionChannelsResponse>; + channelClientState(request: _117.QueryChannelClientStateRequest): Promise<_117.QueryChannelClientStateResponse>; + channelConsensusState(request: _117.QueryChannelConsensusStateRequest): Promise<_117.QueryChannelConsensusStateResponse>; + packetCommitment(request: _117.QueryPacketCommitmentRequest): Promise<_117.QueryPacketCommitmentResponse>; + packetCommitments(request: _117.QueryPacketCommitmentsRequest): Promise<_117.QueryPacketCommitmentsResponse>; + packetReceipt(request: _117.QueryPacketReceiptRequest): Promise<_117.QueryPacketReceiptResponse>; + packetAcknowledgement(request: _117.QueryPacketAcknowledgementRequest): Promise<_117.QueryPacketAcknowledgementResponse>; + packetAcknowledgements(request: _117.QueryPacketAcknowledgementsRequest): 
Promise<_117.QueryPacketAcknowledgementsResponse>; + unreceivedPackets(request: _117.QueryUnreceivedPacketsRequest): Promise<_117.QueryUnreceivedPacketsResponse>; + unreceivedAcks(request: _117.QueryUnreceivedAcksRequest): Promise<_117.QueryUnreceivedAcksResponse>; + nextSequenceReceive(request: _117.QueryNextSequenceReceiveRequest): Promise<_117.QueryNextSequenceReceiveResponse>; + }; + LCDQueryClient: typeof _198.LCDQueryClient; + MsgChannelOpenInit: { + encode(message: _118.MsgChannelOpenInit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenInit; + fromPartial(object: { + portId?: string; + channel?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + }; + signer?: string; + }): _118.MsgChannelOpenInit; + }; + MsgChannelOpenInitResponse: { + encode(_: _118.MsgChannelOpenInitResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenInitResponse; + fromPartial(_: {}): _118.MsgChannelOpenInitResponse; + }; + MsgChannelOpenTry: { + encode(message: _118.MsgChannelOpenTry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenTry; + fromPartial(object: { + portId?: string; + previousChannelId?: string; + channel?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + }; + counterpartyVersion?: string; + proofInit?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgChannelOpenTry; + }; + MsgChannelOpenTryResponse: { + encode(_: _118.MsgChannelOpenTryResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenTryResponse; + fromPartial(_: {}): _118.MsgChannelOpenTryResponse; + }; + MsgChannelOpenAck: { + encode(message: _118.MsgChannelOpenAck, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenAck; + fromPartial(object: { + portId?: string; + channelId?: string; + counterpartyChannelId?: string; + counterpartyVersion?: string; + proofTry?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgChannelOpenAck; + }; + MsgChannelOpenAckResponse: { + encode(_: _118.MsgChannelOpenAckResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenAckResponse; + fromPartial(_: {}): _118.MsgChannelOpenAckResponse; + }; + MsgChannelOpenConfirm: { + encode(message: _118.MsgChannelOpenConfirm, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenConfirm; + fromPartial(object: { + portId?: string; + channelId?: string; + proofAck?: Uint8Array; + proofHeight?: { 
+ revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgChannelOpenConfirm; + }; + MsgChannelOpenConfirmResponse: { + encode(_: _118.MsgChannelOpenConfirmResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelOpenConfirmResponse; + fromPartial(_: {}): _118.MsgChannelOpenConfirmResponse; + }; + MsgChannelCloseInit: { + encode(message: _118.MsgChannelCloseInit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelCloseInit; + fromPartial(object: { + portId?: string; + channelId?: string; + signer?: string; + }): _118.MsgChannelCloseInit; + }; + MsgChannelCloseInitResponse: { + encode(_: _118.MsgChannelCloseInitResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelCloseInitResponse; + fromPartial(_: {}): _118.MsgChannelCloseInitResponse; + }; + MsgChannelCloseConfirm: { + encode(message: _118.MsgChannelCloseConfirm, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelCloseConfirm; + fromPartial(object: { + portId?: string; + channelId?: string; + proofInit?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgChannelCloseConfirm; + }; + MsgChannelCloseConfirmResponse: { + encode(_: _118.MsgChannelCloseConfirmResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgChannelCloseConfirmResponse; + fromPartial(_: {}): _118.MsgChannelCloseConfirmResponse; + }; + MsgRecvPacket: { + encode(message: _118.MsgRecvPacket, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgRecvPacket; + fromPartial(object: { + packet?: { + sequence?: string | number | import("long").Long; + sourcePort?: string; + sourceChannel?: string; + destinationPort?: string; + destinationChannel?: string; + data?: Uint8Array; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }; + proofCommitment?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgRecvPacket; + }; + MsgRecvPacketResponse: { + encode(_: _118.MsgRecvPacketResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgRecvPacketResponse; + fromPartial(_: {}): _118.MsgRecvPacketResponse; + }; + MsgTimeout: { + encode(message: _118.MsgTimeout, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgTimeout; + fromPartial(object: { + packet?: { + sequence?: string | number | import("long").Long; + sourcePort?: string; + sourceChannel?: 
string; + destinationPort?: string; + destinationChannel?: string; + data?: Uint8Array; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }; + proofUnreceived?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + nextSequenceRecv?: string | number | import("long").Long; + signer?: string; + }): _118.MsgTimeout; + }; + MsgTimeoutResponse: { + encode(_: _118.MsgTimeoutResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgTimeoutResponse; + fromPartial(_: {}): _118.MsgTimeoutResponse; + }; + MsgTimeoutOnClose: { + encode(message: _118.MsgTimeoutOnClose, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgTimeoutOnClose; + fromPartial(object: { + packet?: { + sequence?: string | number | import("long").Long; + sourcePort?: string; + sourceChannel?: string; + destinationPort?: string; + destinationChannel?: string; + data?: Uint8Array; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }; + proofUnreceived?: Uint8Array; + proofClose?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + nextSequenceRecv?: string | number | import("long").Long; + signer?: string; + }): _118.MsgTimeoutOnClose; + }; + MsgTimeoutOnCloseResponse: { + encode(_: _118.MsgTimeoutOnCloseResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgTimeoutOnCloseResponse; + fromPartial(_: {}): _118.MsgTimeoutOnCloseResponse; + }; + MsgAcknowledgement: { + encode(message: _118.MsgAcknowledgement, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgAcknowledgement; + fromPartial(object: { + packet?: { + sequence?: string | number | import("long").Long; + sourcePort?: string; + sourceChannel?: string; + destinationPort?: string; + destinationChannel?: string; + data?: Uint8Array; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }; + acknowledgement?: Uint8Array; + proofAcked?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _118.MsgAcknowledgement; + }; + MsgAcknowledgementResponse: { + encode(_: _118.MsgAcknowledgementResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _118.MsgAcknowledgementResponse; + fromPartial(_: {}): _118.MsgAcknowledgementResponse; + }; + QueryChannelRequest: { + encode(message: _117.QueryChannelRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, 
length?: number): _117.QueryChannelRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + }): _117.QueryChannelRequest; + }; + QueryChannelResponse: { + encode(message: _117.QueryChannelResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelResponse; + fromPartial(object: { + channel?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryChannelResponse; + }; + QueryChannelsRequest: { + encode(message: _117.QueryChannelsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelsRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _117.QueryChannelsRequest; + }; + QueryChannelsResponse: { + encode(message: _117.QueryChannelsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelsResponse; + fromPartial(object: { + channels?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + portId?: string; + channelId?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryChannelsResponse; + }; + QueryConnectionChannelsRequest: { + encode(message: _117.QueryConnectionChannelsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryConnectionChannelsRequest; + fromPartial(object: { + connection?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _117.QueryConnectionChannelsRequest; + }; + QueryConnectionChannelsResponse: { + encode(message: _117.QueryConnectionChannelsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryConnectionChannelsResponse; + fromPartial(object: { + channels?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + portId?: string; + channelId?: string; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryConnectionChannelsResponse; + }; + QueryChannelClientStateRequest: { + encode(message: _117.QueryChannelClientStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; 
+ decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelClientStateRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + }): _117.QueryChannelClientStateRequest; + }; + QueryChannelClientStateResponse: { + encode(message: _117.QueryChannelClientStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelClientStateResponse; + fromPartial(object: { + identifiedClientState?: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryChannelClientStateResponse; + }; + QueryChannelConsensusStateRequest: { + encode(message: _117.QueryChannelConsensusStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelConsensusStateRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }): _117.QueryChannelConsensusStateRequest; + }; + QueryChannelConsensusStateResponse: { + encode(message: _117.QueryChannelConsensusStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryChannelConsensusStateResponse; + fromPartial(object: { + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + clientId?: string; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryChannelConsensusStateResponse; + }; + QueryPacketCommitmentRequest: { + encode(message: _117.QueryPacketCommitmentRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketCommitmentRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }): _117.QueryPacketCommitmentRequest; + }; + QueryPacketCommitmentResponse: { + encode(message: _117.QueryPacketCommitmentResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketCommitmentResponse; + fromPartial(object: { + commitment?: Uint8Array; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryPacketCommitmentResponse; + }; + QueryPacketCommitmentsRequest: { + encode(message: _117.QueryPacketCommitmentsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketCommitmentsRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _117.QueryPacketCommitmentsRequest; + }; + QueryPacketCommitmentsResponse: { + encode(message: 
_117.QueryPacketCommitmentsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketCommitmentsResponse; + fromPartial(object: { + commitments?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryPacketCommitmentsResponse; + }; + QueryPacketReceiptRequest: { + encode(message: _117.QueryPacketReceiptRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketReceiptRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }): _117.QueryPacketReceiptRequest; + }; + QueryPacketReceiptResponse: { + encode(message: _117.QueryPacketReceiptResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketReceiptResponse; + fromPartial(object: { + received?: boolean; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryPacketReceiptResponse; + }; + QueryPacketAcknowledgementRequest: { + encode(message: _117.QueryPacketAcknowledgementRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketAcknowledgementRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }): _117.QueryPacketAcknowledgementRequest; + }; + QueryPacketAcknowledgementResponse: { + encode(message: _117.QueryPacketAcknowledgementResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketAcknowledgementResponse; + fromPartial(object: { + acknowledgement?: Uint8Array; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryPacketAcknowledgementResponse; + }; + QueryPacketAcknowledgementsRequest: { + encode(message: _117.QueryPacketAcknowledgementsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketAcknowledgementsRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + packetCommitmentSequences?: (string | number | import("long").Long)[]; + }): _117.QueryPacketAcknowledgementsRequest; + }; + QueryPacketAcknowledgementsResponse: { + encode(message: _117.QueryPacketAcknowledgementsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryPacketAcknowledgementsResponse; + 
fromPartial(object: { + acknowledgements?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryPacketAcknowledgementsResponse; + }; + QueryUnreceivedPacketsRequest: { + encode(message: _117.QueryUnreceivedPacketsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryUnreceivedPacketsRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + packetCommitmentSequences?: (string | number | import("long").Long)[]; + }): _117.QueryUnreceivedPacketsRequest; + }; + QueryUnreceivedPacketsResponse: { + encode(message: _117.QueryUnreceivedPacketsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryUnreceivedPacketsResponse; + fromPartial(object: { + sequences?: (string | number | import("long").Long)[]; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryUnreceivedPacketsResponse; + }; + QueryUnreceivedAcksRequest: { + encode(message: _117.QueryUnreceivedAcksRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryUnreceivedAcksRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + packetAckSequences?: (string | number | import("long").Long)[]; + }): _117.QueryUnreceivedAcksRequest; + }; + QueryUnreceivedAcksResponse: { + encode(message: _117.QueryUnreceivedAcksResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryUnreceivedAcksResponse; + fromPartial(object: { + sequences?: (string | number | import("long").Long)[]; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryUnreceivedAcksResponse; + }; + QueryNextSequenceReceiveRequest: { + encode(message: _117.QueryNextSequenceReceiveRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryNextSequenceReceiveRequest; + fromPartial(object: { + portId?: string; + channelId?: string; + }): _117.QueryNextSequenceReceiveRequest; + }; + QueryNextSequenceReceiveResponse: { + encode(message: _117.QueryNextSequenceReceiveResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _117.QueryNextSequenceReceiveResponse; + fromPartial(object: { + nextSequenceReceive?: string | number | import("long").Long; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _117.QueryNextSequenceReceiveResponse; + }; + GenesisState: { + encode(message: _116.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: 
number): _116.GenesisState; + fromPartial(object: { + channels?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + portId?: string; + channelId?: string; + }[]; + acknowledgements?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + commitments?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + receipts?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + sendSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + recvSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + ackSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + nextChannelSequence?: string | number | import("long").Long; + }): _116.GenesisState; + }; + PacketSequence: { + encode(message: _116.PacketSequence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _116.PacketSequence; + fromPartial(object: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }): _116.PacketSequence; + }; + stateFromJSON(object: any): _115.State; + stateToJSON(object: _115.State): string; + orderFromJSON(object: any): _115.Order; + orderToJSON(object: _115.Order): string; + State: typeof _115.State; + StateSDKType: typeof _115.State; + Order: typeof _115.Order; + OrderSDKType: typeof _115.Order; + Channel: { + encode(message: _115.Channel, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.Channel; + fromPartial(object: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + }): _115.Channel; + }; + IdentifiedChannel: { + encode(message: _115.IdentifiedChannel, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.IdentifiedChannel; + fromPartial(object: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + portId?: string; + channelId?: string; + }): _115.IdentifiedChannel; + }; + Counterparty: { + encode(message: _115.Counterparty, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.Counterparty; + fromPartial(object: { + portId?: string; + channelId?: string; + }): _115.Counterparty; + }; + Packet: { + encode(message: _115.Packet, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.Packet; + fromPartial(object: { + sequence?: string | number | import("long").Long; + sourcePort?: string; + sourceChannel?: string; + destinationPort?: string; + destinationChannel?: string; + data?: Uint8Array; + timeoutHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number 
| import("long").Long; + }; + timeoutTimestamp?: string | number | import("long").Long; + }): _115.Packet; + }; + PacketState: { + encode(message: _115.PacketState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.PacketState; + fromPartial(object: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }): _115.PacketState; + }; + Acknowledgement: { + encode(message: _115.Acknowledgement, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _115.Acknowledgement; + fromPartial(object: { + result?: Uint8Array; + error?: string; + }): _115.Acknowledgement; + }; + }; + } + namespace client { + const v1: { + MsgClientImpl: typeof _208.MsgClientImpl; + QueryClientImpl: typeof _203.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + clientState(request: _121.QueryClientStateRequest): Promise<_121.QueryClientStateResponse>; + clientStates(request?: _121.QueryClientStatesRequest): Promise<_121.QueryClientStatesResponse>; + consensusState(request: _121.QueryConsensusStateRequest): Promise<_121.QueryConsensusStateResponse>; + consensusStates(request: _121.QueryConsensusStatesRequest): Promise<_121.QueryConsensusStatesResponse>; + clientStatus(request: _121.QueryClientStatusRequest): Promise<_121.QueryClientStatusResponse>; + clientParams(request?: _121.QueryClientParamsRequest): Promise<_121.QueryClientParamsResponse>; + upgradedClientState(request?: _121.QueryUpgradedClientStateRequest): Promise<_121.QueryUpgradedClientStateResponse>; + upgradedConsensusState(request?: _121.QueryUpgradedConsensusStateRequest): Promise<_121.QueryUpgradedConsensusStateResponse>; + }; + LCDQueryClient: typeof _199.LCDQueryClient; + MsgCreateClient: { + encode(message: _122.MsgCreateClient, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgCreateClient; + fromPartial(object: { + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + signer?: string; + }): _122.MsgCreateClient; + }; + MsgCreateClientResponse: { + encode(_: _122.MsgCreateClientResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgCreateClientResponse; + fromPartial(_: {}): _122.MsgCreateClientResponse; + }; + MsgUpdateClient: { + encode(message: _122.MsgUpdateClient, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgUpdateClient; + fromPartial(object: { + clientId?: string; + header?: { + typeUrl?: string; + value?: Uint8Array; + }; + signer?: string; + }): _122.MsgUpdateClient; + }; + MsgUpdateClientResponse: { + encode(_: _122.MsgUpdateClientResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgUpdateClientResponse; + fromPartial(_: {}): _122.MsgUpdateClientResponse; + }; + MsgUpgradeClient: { + encode(message: _122.MsgUpgradeClient, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: 
number): _122.MsgUpgradeClient; + fromPartial(object: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + proofUpgradeClient?: Uint8Array; + proofUpgradeConsensusState?: Uint8Array; + signer?: string; + }): _122.MsgUpgradeClient; + }; + MsgUpgradeClientResponse: { + encode(_: _122.MsgUpgradeClientResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgUpgradeClientResponse; + fromPartial(_: {}): _122.MsgUpgradeClientResponse; + }; + MsgSubmitMisbehaviour: { + encode(message: _122.MsgSubmitMisbehaviour, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgSubmitMisbehaviour; + fromPartial(object: { + clientId?: string; + misbehaviour?: { + typeUrl?: string; + value?: Uint8Array; + }; + signer?: string; + }): _122.MsgSubmitMisbehaviour; + }; + MsgSubmitMisbehaviourResponse: { + encode(_: _122.MsgSubmitMisbehaviourResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _122.MsgSubmitMisbehaviourResponse; + fromPartial(_: {}): _122.MsgSubmitMisbehaviourResponse; + }; + QueryClientStateRequest: { + encode(message: _121.QueryClientStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStateRequest; + fromPartial(object: { + clientId?: string; + }): _121.QueryClientStateRequest; + }; + QueryClientStateResponse: { + encode(message: _121.QueryClientStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStateResponse; + fromPartial(object: { + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _121.QueryClientStateResponse; + }; + QueryClientStatesRequest: { + encode(message: _121.QueryClientStatesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStatesRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _121.QueryClientStatesRequest; + }; + QueryClientStatesResponse: { + encode(message: _121.QueryClientStatesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStatesResponse; + fromPartial(object: { + clientStates?: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _121.QueryClientStatesResponse; + }; + QueryConsensusStateRequest: { + encode(message: _121.QueryConsensusStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_121.QueryConsensusStateRequest; + fromPartial(object: { + clientId?: string; + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + latestHeight?: boolean; + }): _121.QueryConsensusStateRequest; + }; + QueryConsensusStateResponse: { + encode(message: _121.QueryConsensusStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryConsensusStateResponse; + fromPartial(object: { + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _121.QueryConsensusStateResponse; + }; + QueryConsensusStatesRequest: { + encode(message: _121.QueryConsensusStatesRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryConsensusStatesRequest; + fromPartial(object: { + clientId?: string; + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _121.QueryConsensusStatesRequest; + }; + QueryConsensusStatesResponse: { + encode(message: _121.QueryConsensusStatesResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryConsensusStatesResponse; + fromPartial(object: { + consensusStates?: { + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + }): _121.QueryConsensusStatesResponse; + }; + QueryClientStatusRequest: { + encode(message: _121.QueryClientStatusRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStatusRequest; + fromPartial(object: { + clientId?: string; + }): _121.QueryClientStatusRequest; + }; + QueryClientStatusResponse: { + encode(message: _121.QueryClientStatusResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientStatusResponse; + fromPartial(object: { + status?: string; + }): _121.QueryClientStatusResponse; + }; + QueryClientParamsRequest: { + encode(_: _121.QueryClientParamsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientParamsRequest; + fromPartial(_: {}): _121.QueryClientParamsRequest; + }; + QueryClientParamsResponse: { + encode(message: _121.QueryClientParamsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryClientParamsResponse; + fromPartial(object: { + params?: { + allowedClients?: string[]; + }; + }): _121.QueryClientParamsResponse; + }; + QueryUpgradedClientStateRequest: { + encode(_: _121.QueryUpgradedClientStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + 
decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryUpgradedClientStateRequest; + fromPartial(_: {}): _121.QueryUpgradedClientStateRequest; + }; + QueryUpgradedClientStateResponse: { + encode(message: _121.QueryUpgradedClientStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryUpgradedClientStateResponse; + fromPartial(object: { + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _121.QueryUpgradedClientStateResponse; + }; + QueryUpgradedConsensusStateRequest: { + encode(_: _121.QueryUpgradedConsensusStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryUpgradedConsensusStateRequest; + fromPartial(_: {}): _121.QueryUpgradedConsensusStateRequest; + }; + QueryUpgradedConsensusStateResponse: { + encode(message: _121.QueryUpgradedConsensusStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _121.QueryUpgradedConsensusStateResponse; + fromPartial(object: { + upgradedConsensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _121.QueryUpgradedConsensusStateResponse; + }; + GenesisState: { + encode(message: _120.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _120.GenesisState; + fromPartial(object: { + clients?: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + clientsConsensus?: { + clientId?: string; + consensusStates?: { + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }[]; + clientsMetadata?: { + clientId?: string; + clientMetadata?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + }[]; + params?: { + allowedClients?: string[]; + }; + createLocalhost?: boolean; + nextClientSequence?: string | number | import("long").Long; + }): _120.GenesisState; + }; + GenesisMetadata: { + encode(message: _120.GenesisMetadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _120.GenesisMetadata; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + }): _120.GenesisMetadata; + }; + IdentifiedGenesisMetadata: { + encode(message: _120.IdentifiedGenesisMetadata, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _120.IdentifiedGenesisMetadata; + fromPartial(object: { + clientId?: string; + clientMetadata?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + }): _120.IdentifiedGenesisMetadata; + }; + IdentifiedClientState: { + encode(message: _119.IdentifiedClientState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.IdentifiedClientState; + fromPartial(object: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _119.IdentifiedClientState; + }; + ConsensusStateWithHeight: { + encode(message: _119.ConsensusStateWithHeight, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.ConsensusStateWithHeight; + fromPartial(object: { + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _119.ConsensusStateWithHeight; + }; + ClientConsensusStates: { + encode(message: _119.ClientConsensusStates, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.ClientConsensusStates; + fromPartial(object: { + clientId?: string; + consensusStates?: { + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }): _119.ClientConsensusStates; + }; + ClientUpdateProposal: { + encode(message: _119.ClientUpdateProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.ClientUpdateProposal; + fromPartial(object: { + title?: string; + description?: string; + subjectClientId?: string; + substituteClientId?: string; + }): _119.ClientUpdateProposal; + }; + UpgradeProposal: { + encode(message: _119.UpgradeProposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.UpgradeProposal; + fromPartial(object: { + title?: string; + description?: string; + plan?: { + name?: string; + time?: Date; + height?: string | number | import("long").Long; + info?: string; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + upgradedClientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _119.UpgradeProposal; + }; + Height: { + encode(message: _119.Height, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.Height; + fromPartial(object: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }): _119.Height; + }; + Params: { + encode(message: _119.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _119.Params; + fromPartial(object: { + allowedClients?: string[]; + }): _119.Params; + }; + }; + } + namespace commitment { + const v1: { + MerkleRoot: { + encode(message: _123.MerkleRoot, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _123.MerkleRoot; + fromPartial(object: { + hash?: Uint8Array; + }): _123.MerkleRoot; + }; + MerklePrefix: { + encode(message: _123.MerklePrefix, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _123.MerklePrefix; + fromPartial(object: { + keyPrefix?: Uint8Array; + }): _123.MerklePrefix; + }; + MerklePath: { + encode(message: _123.MerklePath, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _123.MerklePath; + fromPartial(object: { + keyPath?: string[]; + }): _123.MerklePath; + }; + 
MerkleProof: { + encode(message: _123.MerkleProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _123.MerkleProof; + fromPartial(object: { + proofs?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + batch?: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + }[]; + }; + compressed?: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: 
import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + }; + }[]; + lookupInners?: { + hash?: import("../confio/proofs").HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }[]; + }): _123.MerkleProof; + }; + }; + } + namespace connection { + const v1: { + MsgClientImpl: typeof _209.MsgClientImpl; + QueryClientImpl: typeof _204.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + connection(request: _126.QueryConnectionRequest): Promise<_126.QueryConnectionResponse>; + connections(request?: _126.QueryConnectionsRequest): Promise<_126.QueryConnectionsResponse>; + clientConnections(request: _126.QueryClientConnectionsRequest): Promise<_126.QueryClientConnectionsResponse>; + connectionClientState(request: _126.QueryConnectionClientStateRequest): Promise<_126.QueryConnectionClientStateResponse>; + connectionConsensusState(request: _126.QueryConnectionConsensusStateRequest): Promise<_126.QueryConnectionConsensusStateResponse>; + }; + LCDQueryClient: typeof _200.LCDQueryClient; + MsgConnectionOpenInit: { + encode(message: _127.MsgConnectionOpenInit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenInit; + fromPartial(object: { + clientId?: string; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + version?: { + identifier?: string; + features?: string[]; + }; + delayPeriod?: string | number | import("long").Long; + signer?: string; + }): _127.MsgConnectionOpenInit; + }; + MsgConnectionOpenInitResponse: { + encode(_: _127.MsgConnectionOpenInitResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenInitResponse; + fromPartial(_: {}): _127.MsgConnectionOpenInitResponse; + }; + MsgConnectionOpenTry: { + encode(message: _127.MsgConnectionOpenTry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenTry; + fromPartial(object: { + clientId?: string; + previousConnectionId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + counterpartyVersions?: { + identifier?: string; + features?: string[]; + }[]; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + proofInit?: Uint8Array; + proofClient?: Uint8Array; + proofConsensus?: Uint8Array; + consensusHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _127.MsgConnectionOpenTry; + }; + MsgConnectionOpenTryResponse: { + encode(_: _127.MsgConnectionOpenTryResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenTryResponse; + fromPartial(_: {}): _127.MsgConnectionOpenTryResponse; + }; + MsgConnectionOpenAck: { + 
encode(message: _127.MsgConnectionOpenAck, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenAck; + fromPartial(object: { + connectionId?: string; + counterpartyConnectionId?: string; + version?: { + identifier?: string; + features?: string[]; + }; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + proofTry?: Uint8Array; + proofClient?: Uint8Array; + proofConsensus?: Uint8Array; + consensusHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _127.MsgConnectionOpenAck; + }; + MsgConnectionOpenAckResponse: { + encode(_: _127.MsgConnectionOpenAckResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenAckResponse; + fromPartial(_: {}): _127.MsgConnectionOpenAckResponse; + }; + MsgConnectionOpenConfirm: { + encode(message: _127.MsgConnectionOpenConfirm, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenConfirm; + fromPartial(object: { + connectionId?: string; + proofAck?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + signer?: string; + }): _127.MsgConnectionOpenConfirm; + }; + MsgConnectionOpenConfirmResponse: { + encode(_: _127.MsgConnectionOpenConfirmResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _127.MsgConnectionOpenConfirmResponse; + fromPartial(_: {}): _127.MsgConnectionOpenConfirmResponse; + }; + QueryConnectionRequest: { + encode(message: _126.QueryConnectionRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionRequest; + fromPartial(object: { + connectionId?: string; + }): _126.QueryConnectionRequest; + }; + QueryConnectionResponse: { + encode(message: _126.QueryConnectionResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionResponse; + fromPartial(object: { + connection?: { + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _126.QueryConnectionResponse; + }; + QueryConnectionsRequest: { + encode(message: _126.QueryConnectionsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionsRequest; + fromPartial(object: { + pagination?: { + key?: Uint8Array; + offset?: string | number | import("long").Long; + limit?: string | number | 
import("long").Long; + countTotal?: boolean; + reverse?: boolean; + }; + }): _126.QueryConnectionsRequest; + }; + QueryConnectionsResponse: { + encode(message: _126.QueryConnectionsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionsResponse; + fromPartial(object: { + connections?: { + id?: string; + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }[]; + pagination?: { + nextKey?: Uint8Array; + total?: string | number | import("long").Long; + }; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _126.QueryConnectionsResponse; + }; + QueryClientConnectionsRequest: { + encode(message: _126.QueryClientConnectionsRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryClientConnectionsRequest; + fromPartial(object: { + clientId?: string; + }): _126.QueryClientConnectionsRequest; + }; + QueryClientConnectionsResponse: { + encode(message: _126.QueryClientConnectionsResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryClientConnectionsResponse; + fromPartial(object: { + connectionPaths?: string[]; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _126.QueryClientConnectionsResponse; + }; + QueryConnectionClientStateRequest: { + encode(message: _126.QueryConnectionClientStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionClientStateRequest; + fromPartial(object: { + connectionId?: string; + }): _126.QueryConnectionClientStateRequest; + }; + QueryConnectionClientStateResponse: { + encode(message: _126.QueryConnectionClientStateResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionClientStateResponse; + fromPartial(object: { + identifiedClientState?: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _126.QueryConnectionClientStateResponse; + }; + QueryConnectionConsensusStateRequest: { + encode(message: _126.QueryConnectionConsensusStateRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionConsensusStateRequest; + fromPartial(object: { + connectionId?: string; + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }): _126.QueryConnectionConsensusStateRequest; + }; + QueryConnectionConsensusStateResponse: { + encode(message: _126.QueryConnectionConsensusStateResponse, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _126.QueryConnectionConsensusStateResponse; + fromPartial(object: { + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + clientId?: string; + proof?: Uint8Array; + proofHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _126.QueryConnectionConsensusStateResponse; + }; + GenesisState: { + encode(message: _125.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _125.GenesisState; + fromPartial(object: { + connections?: { + id?: string; + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }[]; + clientConnectionPaths?: { + clientId?: string; + paths?: string[]; + }[]; + nextConnectionSequence?: string | number | import("long").Long; + params?: { + maxExpectedTimePerBlock?: string | number | import("long").Long; + }; + }): _125.GenesisState; + }; + stateFromJSON(object: any): _124.State; + stateToJSON(object: _124.State): string; + State: typeof _124.State; + StateSDKType: typeof _124.State; + ConnectionEnd: { + encode(message: _124.ConnectionEnd, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.ConnectionEnd; + fromPartial(object: { + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }): _124.ConnectionEnd; + }; + IdentifiedConnection: { + encode(message: _124.IdentifiedConnection, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.IdentifiedConnection; + fromPartial(object: { + id?: string; + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }): _124.IdentifiedConnection; + }; + Counterparty: { + encode(message: _124.Counterparty, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.Counterparty; + fromPartial(object: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }): _124.Counterparty; + }; + ClientPaths: { + encode(message: _124.ClientPaths, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.ClientPaths; + fromPartial(object: { + paths?: string[]; + }): _124.ClientPaths; + }; + ConnectionPaths: { + encode(message: _124.ConnectionPaths, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.ConnectionPaths; + fromPartial(object: { + clientId?: string; + paths?: string[]; + }): 
_124.ConnectionPaths; + }; + Version: { + encode(message: _124.Version, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.Version; + fromPartial(object: { + identifier?: string; + features?: string[]; + }): _124.Version; + }; + Params: { + encode(message: _124.Params, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _124.Params; + fromPartial(object: { + maxExpectedTimePerBlock?: string | number | import("long").Long; + }): _124.Params; + }; + }; + } + namespace port { + const v1: { + QueryClientImpl: typeof _205.QueryClientImpl; + createRpcQueryExtension: (base: import("@cosmjs/stargate").QueryClient) => { + appVersion(request: _128.QueryAppVersionRequest): Promise<_128.QueryAppVersionResponse>; + }; + QueryAppVersionRequest: { + encode(message: _128.QueryAppVersionRequest, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _128.QueryAppVersionRequest; + fromPartial(object: { + portId?: string; + connectionId?: string; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + proposedVersion?: string; + }): _128.QueryAppVersionRequest; + }; + QueryAppVersionResponse: { + encode(message: _128.QueryAppVersionResponse, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _128.QueryAppVersionResponse; + fromPartial(object: { + portId?: string; + version?: string; + }): _128.QueryAppVersionResponse; + }; + }; + } + namespace types { + const v1: { + GenesisState: { + encode(message: _129.GenesisState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _129.GenesisState; + fromPartial(object: { + clientGenesis?: { + clients?: { + clientId?: string; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + clientsConsensus?: { + clientId?: string; + consensusStates?: { + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }[]; + }[]; + clientsMetadata?: { + clientId?: string; + clientMetadata?: { + key?: Uint8Array; + value?: Uint8Array; + }[]; + }[]; + params?: { + allowedClients?: string[]; + }; + createLocalhost?: boolean; + nextClientSequence?: string | number | import("long").Long; + }; + connectionGenesis?: { + connections?: { + id?: string; + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }[]; + clientConnectionPaths?: { + clientId?: string; + paths?: string[]; + }[]; + nextConnectionSequence?: string | number | import("long").Long; + params?: { + maxExpectedTimePerBlock?: string | number | import("long").Long; + }; + }; + channelGenesis?: { + channels?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + portId?: string; + channelId?: string; + }[]; + acknowledgements?: { + portId?: string; 
+ channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + commitments?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + receipts?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + data?: Uint8Array; + }[]; + sendSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + recvSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + ackSequences?: { + portId?: string; + channelId?: string; + sequence?: string | number | import("long").Long; + }[]; + nextChannelSequence?: string | number | import("long").Long; + }; + }): _129.GenesisState; + }; + }; + } + } + namespace lightclients { + namespace localhost { + const v1: { + ClientState: { + encode(message: _130.ClientState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _130.ClientState; + fromPartial(object: { + chainId?: string; + height?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + }): _130.ClientState; + }; + }; + } + namespace solomachine { + const v1: { + dataTypeFromJSON(object: any): _131.DataType; + dataTypeToJSON(object: _131.DataType): string; + DataType: typeof _131.DataType; + DataTypeSDKType: typeof _131.DataType; + ClientState: { + encode(message: _131.ClientState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ClientState; + fromPartial(object: { + sequence?: string | number | import("long").Long; + frozenSequence?: string | number | import("long").Long; + consensusState?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + diversifier?: string; + timestamp?: string | number | import("long").Long; + }; + allowUpdateAfterProposal?: boolean; + }): _131.ClientState; + }; + ConsensusState: { + encode(message: _131.ConsensusState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ConsensusState; + fromPartial(object: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + diversifier?: string; + timestamp?: string | number | import("long").Long; + }): _131.ConsensusState; + }; + Header: { + encode(message: _131.Header, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.Header; + fromPartial(object: { + sequence?: string | number | import("long").Long; + timestamp?: string | number | import("long").Long; + signature?: Uint8Array; + newPublicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + newDiversifier?: string; + }): _131.Header; + }; + Misbehaviour: { + encode(message: _131.Misbehaviour, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.Misbehaviour; + fromPartial(object: { + clientId?: string; + sequence?: string | number | import("long").Long; + signatureOne?: { + signature?: Uint8Array; + dataType?: _131.DataType; + data?: Uint8Array; + timestamp?: string | number | import("long").Long; + }; + signatureTwo?: { + signature?: Uint8Array; + dataType?: 
_131.DataType; + data?: Uint8Array; + timestamp?: string | number | import("long").Long; + }; + }): _131.Misbehaviour; + }; + SignatureAndData: { + encode(message: _131.SignatureAndData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.SignatureAndData; + fromPartial(object: { + signature?: Uint8Array; + dataType?: _131.DataType; + data?: Uint8Array; + timestamp?: string | number | import("long").Long; + }): _131.SignatureAndData; + }; + TimestampedSignatureData: { + encode(message: _131.TimestampedSignatureData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.TimestampedSignatureData; + fromPartial(object: { + signatureData?: Uint8Array; + timestamp?: string | number | import("long").Long; + }): _131.TimestampedSignatureData; + }; + SignBytes: { + encode(message: _131.SignBytes, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.SignBytes; + fromPartial(object: { + sequence?: string | number | import("long").Long; + timestamp?: string | number | import("long").Long; + diversifier?: string; + dataType?: _131.DataType; + data?: Uint8Array; + }): _131.SignBytes; + }; + HeaderData: { + encode(message: _131.HeaderData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.HeaderData; + fromPartial(object: { + newPubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + newDiversifier?: string; + }): _131.HeaderData; + }; + ClientStateData: { + encode(message: _131.ClientStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ClientStateData; + fromPartial(object: { + path?: Uint8Array; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _131.ClientStateData; + }; + ConsensusStateData: { + encode(message: _131.ConsensusStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ConsensusStateData; + fromPartial(object: { + path?: Uint8Array; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _131.ConsensusStateData; + }; + ConnectionStateData: { + encode(message: _131.ConnectionStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ConnectionStateData; + fromPartial(object: { + path?: Uint8Array; + connection?: { + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }; + }): _131.ConnectionStateData; + }; + ChannelStateData: { + encode(message: _131.ChannelStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.ChannelStateData; + fromPartial(object: { + path?: Uint8Array; + channel?: { + state?: _115.State; + ordering?: _115.Order; + counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: 
string; + }; + }): _131.ChannelStateData; + }; + PacketCommitmentData: { + encode(message: _131.PacketCommitmentData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.PacketCommitmentData; + fromPartial(object: { + path?: Uint8Array; + commitment?: Uint8Array; + }): _131.PacketCommitmentData; + }; + PacketAcknowledgementData: { + encode(message: _131.PacketAcknowledgementData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.PacketAcknowledgementData; + fromPartial(object: { + path?: Uint8Array; + acknowledgement?: Uint8Array; + }): _131.PacketAcknowledgementData; + }; + PacketReceiptAbsenceData: { + encode(message: _131.PacketReceiptAbsenceData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.PacketReceiptAbsenceData; + fromPartial(object: { + path?: Uint8Array; + }): _131.PacketReceiptAbsenceData; + }; + NextSequenceRecvData: { + encode(message: _131.NextSequenceRecvData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _131.NextSequenceRecvData; + fromPartial(object: { + path?: Uint8Array; + nextSeqRecv?: string | number | import("long").Long; + }): _131.NextSequenceRecvData; + }; + }; + const v2: { + dataTypeFromJSON(object: any): _132.DataType; + dataTypeToJSON(object: _132.DataType): string; + DataType: typeof _132.DataType; + DataTypeSDKType: typeof _132.DataType; + ClientState: { + encode(message: _132.ClientState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ClientState; + fromPartial(object: { + sequence?: string | number | import("long").Long; + isFrozen?: boolean; + consensusState?: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + diversifier?: string; + timestamp?: string | number | import("long").Long; + }; + allowUpdateAfterProposal?: boolean; + }): _132.ClientState; + }; + ConsensusState: { + encode(message: _132.ConsensusState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ConsensusState; + fromPartial(object: { + publicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + diversifier?: string; + timestamp?: string | number | import("long").Long; + }): _132.ConsensusState; + }; + Header: { + encode(message: _132.Header, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.Header; + fromPartial(object: { + sequence?: string | number | import("long").Long; + timestamp?: string | number | import("long").Long; + signature?: Uint8Array; + newPublicKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + newDiversifier?: string; + }): _132.Header; + }; + Misbehaviour: { + encode(message: _132.Misbehaviour, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.Misbehaviour; + fromPartial(object: { + clientId?: string; + sequence?: string | number | import("long").Long; + signatureOne?: { + signature?: Uint8Array; + dataType?: _132.DataType; + data?: Uint8Array; + timestamp?: 
string | number | import("long").Long; + }; + signatureTwo?: { + signature?: Uint8Array; + dataType?: _132.DataType; + data?: Uint8Array; + timestamp?: string | number | import("long").Long; + }; + }): _132.Misbehaviour; + }; + SignatureAndData: { + encode(message: _132.SignatureAndData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.SignatureAndData; + fromPartial(object: { + signature?: Uint8Array; + dataType?: _132.DataType; + data?: Uint8Array; + timestamp?: string | number | import("long").Long; + }): _132.SignatureAndData; + }; + TimestampedSignatureData: { + encode(message: _132.TimestampedSignatureData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.TimestampedSignatureData; + fromPartial(object: { + signatureData?: Uint8Array; + timestamp?: string | number | import("long").Long; + }): _132.TimestampedSignatureData; + }; + SignBytes: { + encode(message: _132.SignBytes, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.SignBytes; + fromPartial(object: { + sequence?: string | number | import("long").Long; + timestamp?: string | number | import("long").Long; + diversifier?: string; + dataType?: _132.DataType; + data?: Uint8Array; + }): _132.SignBytes; + }; + HeaderData: { + encode(message: _132.HeaderData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.HeaderData; + fromPartial(object: { + newPubKey?: { + typeUrl?: string; + value?: Uint8Array; + }; + newDiversifier?: string; + }): _132.HeaderData; + }; + ClientStateData: { + encode(message: _132.ClientStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ClientStateData; + fromPartial(object: { + path?: Uint8Array; + clientState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _132.ClientStateData; + }; + ConsensusStateData: { + encode(message: _132.ConsensusStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ConsensusStateData; + fromPartial(object: { + path?: Uint8Array; + consensusState?: { + typeUrl?: string; + value?: Uint8Array; + }; + }): _132.ConsensusStateData; + }; + ConnectionStateData: { + encode(message: _132.ConnectionStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ConnectionStateData; + fromPartial(object: { + path?: Uint8Array; + connection?: { + clientId?: string; + versions?: { + identifier?: string; + features?: string[]; + }[]; + state?: _124.State; + counterparty?: { + clientId?: string; + connectionId?: string; + prefix?: { + keyPrefix?: Uint8Array; + }; + }; + delayPeriod?: string | number | import("long").Long; + }; + }): _132.ConnectionStateData; + }; + ChannelStateData: { + encode(message: _132.ChannelStateData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.ChannelStateData; + fromPartial(object: { + path?: Uint8Array; + channel?: { + state?: _115.State; + ordering?: _115.Order; + 
counterparty?: { + portId?: string; + channelId?: string; + }; + connectionHops?: string[]; + version?: string; + }; + }): _132.ChannelStateData; + }; + PacketCommitmentData: { + encode(message: _132.PacketCommitmentData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.PacketCommitmentData; + fromPartial(object: { + path?: Uint8Array; + commitment?: Uint8Array; + }): _132.PacketCommitmentData; + }; + PacketAcknowledgementData: { + encode(message: _132.PacketAcknowledgementData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.PacketAcknowledgementData; + fromPartial(object: { + path?: Uint8Array; + acknowledgement?: Uint8Array; + }): _132.PacketAcknowledgementData; + }; + PacketReceiptAbsenceData: { + encode(message: _132.PacketReceiptAbsenceData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.PacketReceiptAbsenceData; + fromPartial(object: { + path?: Uint8Array; + }): _132.PacketReceiptAbsenceData; + }; + NextSequenceRecvData: { + encode(message: _132.NextSequenceRecvData, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _132.NextSequenceRecvData; + fromPartial(object: { + path?: Uint8Array; + nextSeqRecv?: string | number | import("long").Long; + }): _132.NextSequenceRecvData; + }; + }; + } + namespace tendermint { + const v1: { + ClientState: { + encode(message: _133.ClientState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _133.ClientState; + fromPartial(object: { + chainId?: string; + trustLevel?: { + numerator?: string | number | import("long").Long; + denominator?: string | number | import("long").Long; + }; + trustingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + unbondingPeriod?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxClockDrift?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + frozenHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + latestHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + proofSpecs?: { + leafSpec?: { + hash?: import("../confio/proofs").HashOp; + prehashKey?: import("../confio/proofs").HashOp; + prehashValue?: import("../confio/proofs").HashOp; + length?: import("../confio/proofs").LengthOp; + prefix?: Uint8Array; + }; + innerSpec?: { + childOrder?: number[]; + childSize?: number; + minPrefixLength?: number; + maxPrefixLength?: number; + emptyChild?: Uint8Array; + hash?: import("../confio/proofs").HashOp; + }; + maxDepth?: number; + minDepth?: number; + }[]; + upgradePath?: string[]; + allowUpdateAfterExpiry?: boolean; + allowUpdateAfterMisbehaviour?: boolean; + }): _133.ClientState; + }; + ConsensusState: { + encode(message: _133.ConsensusState, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _133.ConsensusState; + fromPartial(object: { + timestamp?: Date; + root?: { + hash?: Uint8Array; + }; + 
nextValidatorsHash?: Uint8Array; + }): _133.ConsensusState; + }; + Misbehaviour: { + encode(message: _133.Misbehaviour, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _133.Misbehaviour; + fromPartial(object: { + clientId?: string; + header1?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + trustedHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + trustedValidators?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + header2?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + 
validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + trustedHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + trustedValidators?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + }): _133.Misbehaviour; + }; + Header: { + encode(message: _133.Header, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _133.Header; + fromPartial(object: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: import("../tendermint/types/types").BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + trustedHeight?: { + revisionNumber?: string | number | import("long").Long; + revisionHeight?: string | number | import("long").Long; + }; + trustedValidators?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | 
import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }): _133.Header; + }; + Fraction: { + encode(message: _133.Fraction, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _133.Fraction; + fromPartial(object: { + numerator?: string | number | import("long").Long; + denominator?: string | number | import("long").Long; + }): _133.Fraction; + }; + }; + } + } + const ClientFactory: { + createRPCMsgClient: ({ rpc }: { + rpc: import("../helpers").Rpc; + }) => Promise<{ + cosmos: { + authz: { + v1beta1: import("../cosmos/authz/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + crisis: { + v1beta1: import("../cosmos/crisis/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + gov: { + v1: import("../cosmos/gov/v1/tx.rpc.msg").MsgClientImpl; + v1beta1: import("../cosmos/gov/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + group: { + v1: import("../cosmos/group/v1/tx.rpc.msg").MsgClientImpl; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + vesting: { + v1beta1: import("../cosmos/vesting/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + }; + ibc: { + applications: { + transfer: { + v1: _206.MsgClientImpl; + }; + }; + core: { + channel: { + v1: _207.MsgClientImpl; + }; + client: { + v1: _208.MsgClientImpl; + }; + connection: { + v1: _209.MsgClientImpl; + }; + }; + }; + }>; + createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | import("@cosmjs/tendermint-rpc").HttpEndpoint; + }) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: import("../cosmos/app/v1alpha1/query").QueryConfigRequest): Promise; + }; + }; + auth: { + v1beta1: { + accounts(request?: import("../cosmos/auth/v1beta1/query").QueryAccountsRequest): Promise; + account(request: import("../cosmos/auth/v1beta1/query").QueryAccountRequest): Promise; + params(request?: import("../cosmos/auth/v1beta1/query").QueryParamsRequest): Promise; + moduleAccounts(request?: import("../cosmos/auth/v1beta1/query").QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: import("../cosmos/auth/v1beta1/query").Bech32PrefixRequest): Promise; + addressBytesToString(request: import("../cosmos/auth/v1beta1/query").AddressBytesToStringRequest): Promise; + addressStringToBytes(request: import("../cosmos/auth/v1beta1/query").AddressStringToBytesRequest): Promise; + }; + }; + authz: { + v1beta1: { + grants(request: import("../cosmos/authz/v1beta1/query").QueryGrantsRequest): Promise; + granterGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranterGrantsRequest): Promise; + granteeGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranteeGrantsRequest): Promise; + }; + }; + bank: { + v1beta1: { + balance(request: import("../cosmos/bank/v1beta1/query").QueryBalanceRequest): 
Promise; + allBalances(request: import("../cosmos/bank/v1beta1/query").QueryAllBalancesRequest): Promise; + spendableBalances(request: import("../cosmos/bank/v1beta1/query").QuerySpendableBalancesRequest): Promise; + totalSupply(request?: import("../cosmos/bank/v1beta1/query").QueryTotalSupplyRequest): Promise; + supplyOf(request: import("../cosmos/bank/v1beta1/query").QuerySupplyOfRequest): Promise; + params(request?: import("../cosmos/bank/v1beta1/query").QueryParamsRequest): Promise; + denomMetadata(request: import("../cosmos/bank/v1beta1/query").QueryDenomMetadataRequest): Promise; + denomsMetadata(request?: import("../cosmos/bank/v1beta1/query").QueryDenomsMetadataRequest): Promise; + denomOwners(request: import("../cosmos/bank/v1beta1/query").QueryDenomOwnersRequest): Promise; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: import("../cosmos/base/tendermint/v1beta1/query").GetNodeInfoRequest): Promise; + getSyncing(request?: import("../cosmos/base/tendermint/v1beta1/query").GetSyncingRequest): Promise; + getLatestBlock(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestBlockRequest): Promise; + getBlockByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetValidatorSetByHeightRequest): Promise; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: import("../cosmos/distribution/v1beta1/query").QueryParamsRequest): Promise; + validatorOutstandingRewards(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorSlashesRequest): Promise; + delegationRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: import("../cosmos/distribution/v1beta1/query").QueryCommunityPoolRequest): Promise; + }; + }; + evidence: { + v1beta1: { + evidence(request: import("../cosmos/evidence/v1beta1/query").QueryEvidenceRequest): Promise; + allEvidence(request?: import("../cosmos/evidence/v1beta1/query").QueryAllEvidenceRequest): Promise; + }; + }; + feegrant: { + v1beta1: { + allowance(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowanceRequest): Promise; + allowances(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesRequest): Promise; + allowancesByGranter(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesByGranterRequest): Promise; + }; + }; + gov: { + v1: { + proposal(request: import("../cosmos/gov/v1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1/query").QueryVoteRequest): Promise; + votes(request: 
import("../cosmos/gov/v1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1/query").QueryTallyResultRequest): Promise; + }; + v1beta1: { + proposal(request: import("../cosmos/gov/v1beta1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1beta1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1beta1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1beta1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1beta1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1beta1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1beta1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1beta1/query").QueryTallyResultRequest): Promise; + }; + }; + group: { + v1: { + groupInfo(request: import("../cosmos/group/v1/query").QueryGroupInfoRequest): Promise; + groupPolicyInfo(request: import("../cosmos/group/v1/query").QueryGroupPolicyInfoRequest): Promise; + groupMembers(request: import("../cosmos/group/v1/query").QueryGroupMembersRequest): Promise; + groupsByAdmin(request: import("../cosmos/group/v1/query").QueryGroupsByAdminRequest): Promise; + groupPoliciesByGroup(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByGroupRequest): Promise; + groupPoliciesByAdmin(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByAdminRequest): Promise; + proposal(request: import("../cosmos/group/v1/query").QueryProposalRequest): Promise; + proposalsByGroupPolicy(request: import("../cosmos/group/v1/query").QueryProposalsByGroupPolicyRequest): Promise; + voteByProposalVoter(request: import("../cosmos/group/v1/query").QueryVoteByProposalVoterRequest): Promise; + votesByProposal(request: import("../cosmos/group/v1/query").QueryVotesByProposalRequest): Promise; + votesByVoter(request: import("../cosmos/group/v1/query").QueryVotesByVoterRequest): Promise; + groupsByMember(request: import("../cosmos/group/v1/query").QueryGroupsByMemberRequest): Promise; + tallyResult(request: import("../cosmos/group/v1/query").QueryTallyResultRequest): Promise; + }; + }; + mint: { + v1beta1: { + params(request?: import("../cosmos/mint/v1beta1/query").QueryParamsRequest): Promise; + inflation(request?: import("../cosmos/mint/v1beta1/query").QueryInflationRequest): Promise; + annualProvisions(request?: import("../cosmos/mint/v1beta1/query").QueryAnnualProvisionsRequest): Promise; + }; + }; + nft: { + v1beta1: { + balance(request: import("../cosmos/nft/v1beta1/query").QueryBalanceRequest): Promise; + owner(request: import("../cosmos/nft/v1beta1/query").QueryOwnerRequest): Promise; + supply(request: import("../cosmos/nft/v1beta1/query").QuerySupplyRequest): Promise; + nFTs(request: import("../cosmos/nft/v1beta1/query").QueryNFTsRequest): Promise; + nFT(request: import("../cosmos/nft/v1beta1/query").QueryNFTRequest): Promise; + class(request: import("../cosmos/nft/v1beta1/query").QueryClassRequest): Promise; + classes(request?: import("../cosmos/nft/v1beta1/query").QueryClassesRequest): Promise; + }; + }; + params: { + v1beta1: { + params(request: import("../cosmos/params/v1beta1/query").QueryParamsRequest): Promise; + subspaces(request?: 
import("../cosmos/params/v1beta1/query").QuerySubspacesRequest): Promise; + }; + }; + slashing: { + v1beta1: { + params(request?: import("../cosmos/slashing/v1beta1/query").QueryParamsRequest): Promise; + signingInfo(request: import("../cosmos/slashing/v1beta1/query").QuerySigningInfoRequest): Promise; + signingInfos(request?: import("../cosmos/slashing/v1beta1/query").QuerySigningInfosRequest): Promise; + }; + }; + staking: { + v1beta1: { + validators(request: import("../cosmos/staking/v1beta1/query").QueryValidatorsRequest): Promise; + validator(request: import("../cosmos/staking/v1beta1/query").QueryValidatorRequest): Promise; + validatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: import("../cosmos/staking/v1beta1/query").QueryDelegationRequest): Promise; + unbondingDelegation(request: import("../cosmos/staking/v1beta1/query").QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: import("../cosmos/staking/v1beta1/query").QueryRedelegationsRequest): Promise; + delegatorValidators(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorValidator(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: import("../cosmos/staking/v1beta1/query").QueryHistoricalInfoRequest): Promise; + pool(request?: import("../cosmos/staking/v1beta1/query").QueryPoolRequest): Promise; + params(request?: import("../cosmos/staking/v1beta1/query").QueryParamsRequest): Promise; + }; + }; + tx: { + v1beta1: { + simulate(request: import("../cosmos/tx/v1beta1/service").SimulateRequest): Promise; + getTx(request: import("../cosmos/tx/v1beta1/service").GetTxRequest): Promise; + broadcastTx(request: import("../cosmos/tx/v1beta1/service").BroadcastTxRequest): Promise; + getTxsEvent(request: import("../cosmos/tx/v1beta1/service").GetTxsEventRequest): Promise; + getBlockWithTxs(request: import("../cosmos/tx/v1beta1/service").GetBlockWithTxsRequest): Promise; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: import("../cosmos/upgrade/v1beta1/query").QueryCurrentPlanRequest): Promise; + appliedPlan(request: import("../cosmos/upgrade/v1beta1/query").QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: import("../cosmos/upgrade/v1beta1/query").QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: import("../cosmos/upgrade/v1beta1/query").QueryModuleVersionsRequest): Promise; + authority(request?: import("../cosmos/upgrade/v1beta1/query").QueryAuthorityRequest): Promise; + }; + }; + }; + ibc: { + applications: { + transfer: { + v1: { + denomTrace(request: _111.QueryDenomTraceRequest): Promise<_111.QueryDenomTraceResponse>; + denomTraces(request?: _111.QueryDenomTracesRequest): Promise<_111.QueryDenomTracesResponse>; + params(request?: _111.QueryParamsRequest): Promise<_111.QueryParamsResponse>; + }; + }; + }; + core: { + channel: { + v1: { + channel(request: _117.QueryChannelRequest): Promise<_117.QueryChannelResponse>; + channels(request?: _117.QueryChannelsRequest): 
Promise<_117.QueryChannelsResponse>; + connectionChannels(request: _117.QueryConnectionChannelsRequest): Promise<_117.QueryConnectionChannelsResponse>; + channelClientState(request: _117.QueryChannelClientStateRequest): Promise<_117.QueryChannelClientStateResponse>; + channelConsensusState(request: _117.QueryChannelConsensusStateRequest): Promise<_117.QueryChannelConsensusStateResponse>; + packetCommitment(request: _117.QueryPacketCommitmentRequest): Promise<_117.QueryPacketCommitmentResponse>; + packetCommitments(request: _117.QueryPacketCommitmentsRequest): Promise<_117.QueryPacketCommitmentsResponse>; + packetReceipt(request: _117.QueryPacketReceiptRequest): Promise<_117.QueryPacketReceiptResponse>; + packetAcknowledgement(request: _117.QueryPacketAcknowledgementRequest): Promise<_117.QueryPacketAcknowledgementResponse>; + packetAcknowledgements(request: _117.QueryPacketAcknowledgementsRequest): Promise<_117.QueryPacketAcknowledgementsResponse>; + unreceivedPackets(request: _117.QueryUnreceivedPacketsRequest): Promise<_117.QueryUnreceivedPacketsResponse>; + unreceivedAcks(request: _117.QueryUnreceivedAcksRequest): Promise<_117.QueryUnreceivedAcksResponse>; + nextSequenceReceive(request: _117.QueryNextSequenceReceiveRequest): Promise<_117.QueryNextSequenceReceiveResponse>; + }; + }; + client: { + v1: { + clientState(request: _121.QueryClientStateRequest): Promise<_121.QueryClientStateResponse>; + clientStates(request?: _121.QueryClientStatesRequest): Promise<_121.QueryClientStatesResponse>; + consensusState(request: _121.QueryConsensusStateRequest): Promise<_121.QueryConsensusStateResponse>; + consensusStates(request: _121.QueryConsensusStatesRequest): Promise<_121.QueryConsensusStatesResponse>; + clientStatus(request: _121.QueryClientStatusRequest): Promise<_121.QueryClientStatusResponse>; + clientParams(request?: _121.QueryClientParamsRequest): Promise<_121.QueryClientParamsResponse>; + upgradedClientState(request?: _121.QueryUpgradedClientStateRequest): Promise<_121.QueryUpgradedClientStateResponse>; + upgradedConsensusState(request?: _121.QueryUpgradedConsensusStateRequest): Promise<_121.QueryUpgradedConsensusStateResponse>; + }; + }; + connection: { + v1: { + connection(request: _126.QueryConnectionRequest): Promise<_126.QueryConnectionResponse>; + connections(request?: _126.QueryConnectionsRequest): Promise<_126.QueryConnectionsResponse>; + clientConnections(request: _126.QueryClientConnectionsRequest): Promise<_126.QueryClientConnectionsResponse>; + connectionClientState(request: _126.QueryConnectionClientStateRequest): Promise<_126.QueryConnectionClientStateResponse>; + connectionConsensusState(request: _126.QueryConnectionConsensusStateRequest): Promise<_126.QueryConnectionConsensusStateResponse>; + }; + }; + port: { + v1: { + appVersion(request: _128.QueryAppVersionRequest): Promise<_128.QueryAppVersionResponse>; + }; + }; + }; + }; + }>; + createLCDClient: ({ restEndpoint }: { + restEndpoint: string; + }) => Promise<{ + cosmos: { + auth: { + v1beta1: import("../cosmos/auth/v1beta1/query.lcd").LCDQueryClient; + }; + authz: { + v1beta1: import("../cosmos/authz/v1beta1/query.lcd").LCDQueryClient; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/query.lcd").LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: import("../cosmos/base/tendermint/v1beta1/query.lcd").LCDQueryClient; + }; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/query.lcd").LCDQueryClient; + }; + evidence: { + v1beta1: 
import("../cosmos/evidence/v1beta1/query.lcd").LCDQueryClient; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/query.lcd").LCDQueryClient; + }; + gov: { + v1: import("../cosmos/gov/v1/query.lcd").LCDQueryClient; + v1beta1: import("../cosmos/gov/v1beta1/query.lcd").LCDQueryClient; + }; + group: { + v1: import("../cosmos/group/v1/query.lcd").LCDQueryClient; + }; + mint: { + v1beta1: import("../cosmos/mint/v1beta1/query.lcd").LCDQueryClient; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/query.lcd").LCDQueryClient; + }; + params: { + v1beta1: import("../cosmos/params/v1beta1/query.lcd").LCDQueryClient; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/query.lcd").LCDQueryClient; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/query.lcd").LCDQueryClient; + }; + tx: { + v1beta1: import("../cosmos/tx/v1beta1/service.lcd").LCDQueryClient; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/query.lcd").LCDQueryClient; + }; + }; + ibc: { + applications: { + transfer: { + v1: _197.LCDQueryClient; + }; + }; + core: { + channel: { + v1: _198.LCDQueryClient; + }; + client: { + v1: _199.LCDQueryClient; + }; + connection: { + v1: _200.LCDQueryClient; + }; + }; + }; + }>; + }; +} diff --git a/packages/codegen/dist/ibc/core/channel/v1/channel.d.ts b/packages/codegen/dist/ibc/core/channel/v1/channel.d.ts new file mode 100644 index 00000000..b039e81f --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/channel.d.ts @@ -0,0 +1,244 @@ +/// +import { Height, HeightSDKType } from "../../client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * State defines if a channel is in one of the following states: + * CLOSED, INIT, TRYOPEN, OPEN or UNINITIALIZED. + */ +export declare enum State { + /** STATE_UNINITIALIZED_UNSPECIFIED - Default State */ + STATE_UNINITIALIZED_UNSPECIFIED = 0, + /** STATE_INIT - A channel has just started the opening handshake. */ + STATE_INIT = 1, + /** STATE_TRYOPEN - A channel has acknowledged the handshake step on the counterparty chain. */ + STATE_TRYOPEN = 2, + /** + * STATE_OPEN - A channel has completed the handshake. Open channels are + * ready to send and receive packets. + */ + STATE_OPEN = 3, + /** + * STATE_CLOSED - A channel has been closed and can no longer be used to send or receive + * packets. + */ + STATE_CLOSED = 4, + UNRECOGNIZED = -1 +} +export declare const StateSDKType: typeof State; +export declare function stateFromJSON(object: any): State; +export declare function stateToJSON(object: State): string; +/** Order defines if a channel is ORDERED or UNORDERED */ +export declare enum Order { + /** ORDER_NONE_UNSPECIFIED - zero-value for channel ordering */ + ORDER_NONE_UNSPECIFIED = 0, + /** + * ORDER_UNORDERED - packets can be delivered in any order, which may differ from the order in + * which they were sent. + */ + ORDER_UNORDERED = 1, + /** ORDER_ORDERED - packets are delivered exactly in the order which they were sent */ + ORDER_ORDERED = 2, + UNRECOGNIZED = -1 +} +export declare const OrderSDKType: typeof Order; +export declare function orderFromJSON(object: any): Order; +export declare function orderToJSON(object: Order): string; +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. 
+ */ +export interface Channel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + ordering: Order; + /** counterparty channel end */ + counterparty?: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version: string; +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. + */ +export interface ChannelSDKType { + state: State; + ordering: Order; + counterparty?: CounterpartySDKType; + connection_hops: string[]; + version: string; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ +export interface IdentifiedChannel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + ordering: Order; + /** counterparty channel end */ + counterparty?: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version: string; + /** port identifier */ + portId: string; + /** channel identifier */ + channelId: string; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ +export interface IdentifiedChannelSDKType { + state: State; + ordering: Order; + counterparty?: CounterpartySDKType; + connection_hops: string[]; + version: string; + port_id: string; + channel_id: string; +} +/** Counterparty defines a channel end counterparty */ +export interface Counterparty { + /** port on the counterparty chain which owns the other end of the channel. */ + portId: string; + /** channel end on the counterparty chain */ + channelId: string; +} +/** Counterparty defines a channel end counterparty */ +export interface CounterpartySDKType { + port_id: string; + channel_id: string; +} +/** Packet defines a type that carries data across different chains through IBC */ +export interface Packet { + /** + * number corresponds to the order of sends and receives, where a Packet + * with an earlier sequence number must be sent and received before a Packet + * with a later sequence number. + */ + sequence: Long; + /** identifies the port on the sending chain. */ + sourcePort: string; + /** identifies the channel end on the sending chain. */ + sourceChannel: string; + /** identifies the port on the receiving chain. */ + destinationPort: string; + /** identifies the channel end on the receiving chain. 
*/ + destinationChannel: string; + /** actual opaque bytes transferred directly to the application module */ + data: Uint8Array; + /** block height after which the packet times out */ + timeoutHeight?: Height; + /** block timestamp (in nanoseconds) after which the packet times out */ + timeoutTimestamp: Long; +} +/** Packet defines a type that carries data across different chains through IBC */ +export interface PacketSDKType { + sequence: Long; + source_port: string; + source_channel: string; + destination_port: string; + destination_channel: string; + data: Uint8Array; + timeout_height?: HeightSDKType; + timeout_timestamp: Long; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ +export interface PacketState { + /** channel port identifier. */ + portId: string; + /** channel unique identifier. */ + channelId: string; + /** packet sequence. */ + sequence: Long; + /** embedded data that represents packet state. */ + data: Uint8Array; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ +export interface PacketStateSDKType { + port_id: string; + channel_id: string; + sequence: Long; + data: Uint8Array; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ +export interface Acknowledgement { + result?: Uint8Array; + error?: string; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). 
Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ +export interface AcknowledgementSDKType { + result?: Uint8Array; + error?: string; +} +export declare const Channel: { + encode(message: Channel, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Channel; + fromPartial(object: DeepPartial): Channel; +}; +export declare const IdentifiedChannel: { + encode(message: IdentifiedChannel, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedChannel; + fromPartial(object: DeepPartial): IdentifiedChannel; +}; +export declare const Counterparty: { + encode(message: Counterparty, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Counterparty; + fromPartial(object: DeepPartial): Counterparty; +}; +export declare const Packet: { + encode(message: Packet, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Packet; + fromPartial(object: DeepPartial): Packet; +}; +export declare const PacketState: { + encode(message: PacketState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketState; + fromPartial(object: DeepPartial): PacketState; +}; +export declare const Acknowledgement: { + encode(message: Acknowledgement, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Acknowledgement; + fromPartial(object: DeepPartial): Acknowledgement; +}; diff --git a/packages/codegen/dist/ibc/core/channel/v1/genesis.d.ts b/packages/codegen/dist/ibc/core/channel/v1/genesis.d.ts new file mode 100644 index 00000000..0dca7bea --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/genesis.d.ts @@ -0,0 +1,55 @@ +/// +import { IdentifiedChannel, IdentifiedChannelSDKType, PacketState, PacketStateSDKType } from "./channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc channel submodule's genesis state. */ +export interface GenesisState { + channels: IdentifiedChannel[]; + acknowledgements: PacketState[]; + commitments: PacketState[]; + receipts: PacketState[]; + sendSequences: PacketSequence[]; + recvSequences: PacketSequence[]; + ackSequences: PacketSequence[]; + /** the sequence for the next generated channel identifier */ + nextChannelSequence: Long; +} +/** GenesisState defines the ibc channel submodule's genesis state. */ +export interface GenesisStateSDKType { + channels: IdentifiedChannelSDKType[]; + acknowledgements: PacketStateSDKType[]; + commitments: PacketStateSDKType[]; + receipts: PacketStateSDKType[]; + send_sequences: PacketSequenceSDKType[]; + recv_sequences: PacketSequenceSDKType[]; + ack_sequences: PacketSequenceSDKType[]; + next_channel_sequence: Long; +} +/** + * PacketSequence defines the genesis type necessary to retrieve and store + * next send and receive sequences. + */ +export interface PacketSequence { + portId: string; + channelId: string; + sequence: Long; +} +/** + * PacketSequence defines the genesis type necessary to retrieve and store + * next send and receive sequences. 
+ */ +export interface PacketSequenceSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const PacketSequence: { + encode(message: PacketSequence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketSequence; + fromPartial(object: DeepPartial): PacketSequence; +}; diff --git a/packages/codegen/dist/ibc/core/channel/v1/query.d.ts b/packages/codegen/dist/ibc/core/channel/v1/query.d.ts new file mode 100644 index 00000000..202e5c8e --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/query.d.ts @@ -0,0 +1,656 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { Channel, ChannelSDKType, IdentifiedChannel, IdentifiedChannelSDKType, PacketState, PacketStateSDKType } from "./channel"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType } from "../../client/v1/client"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** QueryChannelRequest is the request type for the Query/Channel RPC method */ +export interface QueryChannelRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; +} +/** QueryChannelRequest is the request type for the Query/Channel RPC method */ +export interface QueryChannelRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QueryChannelResponse is the response type for the Query/Channel RPC method. + * Besides the Channel end, it includes a proof and the height from which the + * proof was retrieved. + */ +export interface QueryChannelResponse { + /** channel associated with the request identifiers */ + channel?: Channel; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryChannelResponse is the response type for the Query/Channel RPC method. + * Besides the Channel end, it includes a proof and the height from which the + * proof was retrieved. + */ +export interface QueryChannelResponseSDKType { + channel?: ChannelSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** QueryChannelsRequest is the request type for the Query/Channels RPC method */ +export interface QueryChannelsRequest { + /** pagination request */ + pagination?: PageRequest; +} +/** QueryChannelsRequest is the request type for the Query/Channels RPC method */ +export interface QueryChannelsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryChannelsResponse is the response type for the Query/Channels RPC method. */ +export interface QueryChannelsResponse { + /** list of stored channels of the chain. */ + channels: IdentifiedChannel[]; + /** pagination response */ + pagination?: PageResponse; + /** query block height */ + height?: Height; +} +/** QueryChannelsResponse is the response type for the Query/Channels RPC method. 
*/ +export interface QueryChannelsResponseSDKType { + channels: IdentifiedChannelSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryConnectionChannelsRequest is the request type for the + * Query/QueryConnectionChannels RPC method + */ +export interface QueryConnectionChannelsRequest { + /** connection unique identifier */ + connection: string; + /** pagination request */ + pagination?: PageRequest; +} +/** + * QueryConnectionChannelsRequest is the request type for the + * Query/QueryConnectionChannels RPC method + */ +export interface QueryConnectionChannelsRequestSDKType { + connection: string; + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionChannelsResponse is the Response type for the + * Query/QueryConnectionChannels RPC method + */ +export interface QueryConnectionChannelsResponse { + /** list of channels associated with a connection. */ + channels: IdentifiedChannel[]; + /** pagination response */ + pagination?: PageResponse; + /** query block height */ + height?: Height; +} +/** + * QueryConnectionChannelsResponse is the Response type for the + * Query/QueryConnectionChannels RPC method + */ +export interface QueryConnectionChannelsResponseSDKType { + channels: IdentifiedChannelSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryChannelClientStateRequest is the request type for the Query/ClientState + * RPC method + */ +export interface QueryChannelClientStateRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; +} +/** + * QueryChannelClientStateRequest is the request type for the Query/ClientState + * RPC method + */ +export interface QueryChannelClientStateRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ +export interface QueryChannelClientStateResponse { + /** client state associated with the channel */ + identifiedClientState?: IdentifiedClientState; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ +export interface QueryChannelClientStateResponseSDKType { + identified_client_state?: IdentifiedClientStateSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryChannelConsensusStateRequest is the request type for the + * Query/ConsensusState RPC method + */ +export interface QueryChannelConsensusStateRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** revision number of the consensus state */ + revisionNumber: Long; + /** revision height of the consensus state */ + revisionHeight: Long; +} +/** + * QueryChannelConsensusStateRequest is the request type for the + * Query/ConsensusState RPC method + */ +export interface QueryChannelConsensusStateRequestSDKType { + port_id: string; + channel_id: string; + revision_number: Long; + revision_height: Long; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ +export interface QueryChannelConsensusStateResponse { + /** consensus state associated with the channel */ + consensusState?: Any; + /** client ID associated with the consensus state */ + clientId: string; + /** merkle proof of existence */ + 
proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ +export interface QueryChannelConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + client_id: string; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketCommitmentRequest is the request type for the + * Query/PacketCommitment RPC method + */ +export interface QueryPacketCommitmentRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** packet sequence */ + sequence: Long; +} +/** + * QueryPacketCommitmentRequest is the request type for the + * Query/PacketCommitment RPC method + */ +export interface QueryPacketCommitmentRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketCommitmentResponse defines the client query response for a packet + * which also includes a proof and the height from which the proof was + * retrieved + */ +export interface QueryPacketCommitmentResponse { + /** packet associated with the request fields */ + commitment: Uint8Array; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryPacketCommitmentResponse defines the client query response for a packet + * which also includes a proof and the height from which the proof was + * retrieved + */ +export interface QueryPacketCommitmentResponseSDKType { + commitment: Uint8Array; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketCommitmentsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketCommitmentsRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** pagination request */ + pagination?: PageRequest; +} +/** + * QueryPacketCommitmentsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketCommitmentsRequestSDKType { + port_id: string; + channel_id: string; + pagination?: PageRequestSDKType; +} +/** + * QueryPacketCommitmentsResponse is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketCommitmentsResponse { + commitments: PacketState[]; + /** pagination response */ + pagination?: PageResponse; + /** query block height */ + height?: Height; +} +/** + * QueryPacketCommitmentsResponse is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketCommitmentsResponseSDKType { + commitments: PacketStateSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryPacketReceiptRequest is the request type for the + * Query/PacketReceipt RPC method + */ +export interface QueryPacketReceiptRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** packet sequence */ + sequence: Long; +} +/** + * QueryPacketReceiptRequest is the request type for the + * Query/PacketReceipt RPC method + */ +export interface QueryPacketReceiptRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketReceiptResponse defines the client query response for a packet + * receipt which also includes a proof, and the height from which the proof was + * retrieved + */ +export 
interface QueryPacketReceiptResponse { + /** success flag for if receipt exists */ + received: boolean; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryPacketReceiptResponse defines the client query response for a packet + * receipt which also includes a proof, and the height from which the proof was + * retrieved + */ +export interface QueryPacketReceiptResponseSDKType { + received: boolean; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketAcknowledgementRequest is the request type for the + * Query/PacketAcknowledgement RPC method + */ +export interface QueryPacketAcknowledgementRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** packet sequence */ + sequence: Long; +} +/** + * QueryPacketAcknowledgementRequest is the request type for the + * Query/PacketAcknowledgement RPC method + */ +export interface QueryPacketAcknowledgementRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketAcknowledgementResponse defines the client query response for a + * packet which also includes a proof and the height from which the + * proof was retrieved + */ +export interface QueryPacketAcknowledgementResponse { + /** packet associated with the request fields */ + acknowledgement: Uint8Array; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryPacketAcknowledgementResponse defines the client query response for a + * packet which also includes a proof and the height from which the + * proof was retrieved + */ +export interface QueryPacketAcknowledgementResponseSDKType { + acknowledgement: Uint8Array; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketAcknowledgementsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketAcknowledgementsRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** pagination request */ + pagination?: PageRequest; + /** list of packet sequences */ + packetCommitmentSequences: Long[]; +} +/** + * QueryPacketAcknowledgementsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ +export interface QueryPacketAcknowledgementsRequestSDKType { + port_id: string; + channel_id: string; + pagination?: PageRequestSDKType; + packet_commitment_sequences: Long[]; +} +/** + * QueryPacketAcknowledgemetsResponse is the request type for the + * Query/QueryPacketAcknowledgements RPC method + */ +export interface QueryPacketAcknowledgementsResponse { + acknowledgements: PacketState[]; + /** pagination response */ + pagination?: PageResponse; + /** query block height */ + height?: Height; +} +/** + * QueryPacketAcknowledgemetsResponse is the request type for the + * Query/QueryPacketAcknowledgements RPC method + */ +export interface QueryPacketAcknowledgementsResponseSDKType { + acknowledgements: PacketStateSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryUnreceivedPacketsRequest is the request type for the + * Query/UnreceivedPackets RPC method + */ +export interface QueryUnreceivedPacketsRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** list of packet sequences */ + 
packetCommitmentSequences: Long[]; +} +/** + * QueryUnreceivedPacketsRequest is the request type for the + * Query/UnreceivedPackets RPC method + */ +export interface QueryUnreceivedPacketsRequestSDKType { + port_id: string; + channel_id: string; + packet_commitment_sequences: Long[]; +} +/** + * QueryUnreceivedPacketsResponse is the response type for the + * Query/UnreceivedPacketCommitments RPC method + */ +export interface QueryUnreceivedPacketsResponse { + /** list of unreceived packet sequences */ + sequences: Long[]; + /** query block height */ + height?: Height; +} +/** + * QueryUnreceivedPacketsResponse is the response type for the + * Query/UnreceivedPacketCommitments RPC method + */ +export interface QueryUnreceivedPacketsResponseSDKType { + sequences: Long[]; + height?: HeightSDKType; +} +/** + * QueryUnreceivedAcks is the request type for the + * Query/UnreceivedAcks RPC method + */ +export interface QueryUnreceivedAcksRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** list of acknowledgement sequences */ + packetAckSequences: Long[]; +} +/** + * QueryUnreceivedAcks is the request type for the + * Query/UnreceivedAcks RPC method + */ +export interface QueryUnreceivedAcksRequestSDKType { + port_id: string; + channel_id: string; + packet_ack_sequences: Long[]; +} +/** + * QueryUnreceivedAcksResponse is the response type for the + * Query/UnreceivedAcks RPC method + */ +export interface QueryUnreceivedAcksResponse { + /** list of unreceived acknowledgement sequences */ + sequences: Long[]; + /** query block height */ + height?: Height; +} +/** + * QueryUnreceivedAcksResponse is the response type for the + * Query/UnreceivedAcks RPC method + */ +export interface QueryUnreceivedAcksResponseSDKType { + sequences: Long[]; + height?: HeightSDKType; +} +/** + * QueryNextSequenceReceiveRequest is the request type for the + * Query/QueryNextSequenceReceiveRequest RPC method + */ +export interface QueryNextSequenceReceiveRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; +} +/** + * QueryNextSequenceReceiveRequest is the request type for the + * Query/QueryNextSequenceReceiveRequest RPC method + */ +export interface QueryNextSequenceReceiveRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QuerySequenceResponse is the request type for the + * Query/QueryNextSequenceReceiveResponse RPC method + */ +export interface QueryNextSequenceReceiveResponse { + /** next sequence receive number */ + nextSequenceReceive: Long; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QuerySequenceResponse is the request type for the + * Query/QueryNextSequenceReceiveResponse RPC method + */ +export interface QueryNextSequenceReceiveResponseSDKType { + next_sequence_receive: Long; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +export declare const QueryChannelRequest: { + encode(message: QueryChannelRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelRequest; + fromPartial(object: DeepPartial): QueryChannelRequest; +}; +export declare const QueryChannelResponse: { + encode(message: QueryChannelResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelResponse; + fromPartial(object: DeepPartial): QueryChannelResponse; +}; +export declare const 
QueryChannelsRequest: { + encode(message: QueryChannelsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelsRequest; + fromPartial(object: DeepPartial): QueryChannelsRequest; +}; +export declare const QueryChannelsResponse: { + encode(message: QueryChannelsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelsResponse; + fromPartial(object: DeepPartial): QueryChannelsResponse; +}; +export declare const QueryConnectionChannelsRequest: { + encode(message: QueryConnectionChannelsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionChannelsRequest; + fromPartial(object: DeepPartial): QueryConnectionChannelsRequest; +}; +export declare const QueryConnectionChannelsResponse: { + encode(message: QueryConnectionChannelsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionChannelsResponse; + fromPartial(object: DeepPartial): QueryConnectionChannelsResponse; +}; +export declare const QueryChannelClientStateRequest: { + encode(message: QueryChannelClientStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelClientStateRequest; + fromPartial(object: DeepPartial): QueryChannelClientStateRequest; +}; +export declare const QueryChannelClientStateResponse: { + encode(message: QueryChannelClientStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelClientStateResponse; + fromPartial(object: DeepPartial): QueryChannelClientStateResponse; +}; +export declare const QueryChannelConsensusStateRequest: { + encode(message: QueryChannelConsensusStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelConsensusStateRequest; + fromPartial(object: DeepPartial): QueryChannelConsensusStateRequest; +}; +export declare const QueryChannelConsensusStateResponse: { + encode(message: QueryChannelConsensusStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelConsensusStateResponse; + fromPartial(object: DeepPartial): QueryChannelConsensusStateResponse; +}; +export declare const QueryPacketCommitmentRequest: { + encode(message: QueryPacketCommitmentRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentRequest; + fromPartial(object: DeepPartial): QueryPacketCommitmentRequest; +}; +export declare const QueryPacketCommitmentResponse: { + encode(message: QueryPacketCommitmentResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentResponse; + fromPartial(object: DeepPartial): QueryPacketCommitmentResponse; +}; +export declare const QueryPacketCommitmentsRequest: { + encode(message: QueryPacketCommitmentsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentsRequest; + fromPartial(object: DeepPartial): QueryPacketCommitmentsRequest; +}; +export declare const QueryPacketCommitmentsResponse: { + encode(message: QueryPacketCommitmentsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentsResponse; + fromPartial(object: DeepPartial): QueryPacketCommitmentsResponse; 
+}; +export declare const QueryPacketReceiptRequest: { + encode(message: QueryPacketReceiptRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketReceiptRequest; + fromPartial(object: DeepPartial): QueryPacketReceiptRequest; +}; +export declare const QueryPacketReceiptResponse: { + encode(message: QueryPacketReceiptResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketReceiptResponse; + fromPartial(object: DeepPartial): QueryPacketReceiptResponse; +}; +export declare const QueryPacketAcknowledgementRequest: { + encode(message: QueryPacketAcknowledgementRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementRequest; + fromPartial(object: DeepPartial): QueryPacketAcknowledgementRequest; +}; +export declare const QueryPacketAcknowledgementResponse: { + encode(message: QueryPacketAcknowledgementResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementResponse; + fromPartial(object: DeepPartial): QueryPacketAcknowledgementResponse; +}; +export declare const QueryPacketAcknowledgementsRequest: { + encode(message: QueryPacketAcknowledgementsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementsRequest; + fromPartial(object: DeepPartial): QueryPacketAcknowledgementsRequest; +}; +export declare const QueryPacketAcknowledgementsResponse: { + encode(message: QueryPacketAcknowledgementsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementsResponse; + fromPartial(object: DeepPartial): QueryPacketAcknowledgementsResponse; +}; +export declare const QueryUnreceivedPacketsRequest: { + encode(message: QueryUnreceivedPacketsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedPacketsRequest; + fromPartial(object: DeepPartial): QueryUnreceivedPacketsRequest; +}; +export declare const QueryUnreceivedPacketsResponse: { + encode(message: QueryUnreceivedPacketsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedPacketsResponse; + fromPartial(object: DeepPartial): QueryUnreceivedPacketsResponse; +}; +export declare const QueryUnreceivedAcksRequest: { + encode(message: QueryUnreceivedAcksRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedAcksRequest; + fromPartial(object: DeepPartial): QueryUnreceivedAcksRequest; +}; +export declare const QueryUnreceivedAcksResponse: { + encode(message: QueryUnreceivedAcksResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedAcksResponse; + fromPartial(object: DeepPartial): QueryUnreceivedAcksResponse; +}; +export declare const QueryNextSequenceReceiveRequest: { + encode(message: QueryNextSequenceReceiveRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextSequenceReceiveRequest; + fromPartial(object: DeepPartial): QueryNextSequenceReceiveRequest; +}; +export declare const QueryNextSequenceReceiveResponse: { + encode(message: QueryNextSequenceReceiveResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): 
QueryNextSequenceReceiveResponse; + fromPartial(object: DeepPartial): QueryNextSequenceReceiveResponse; +}; diff --git a/packages/codegen/dist/ibc/core/channel/v1/query.lcd.d.ts b/packages/codegen/dist/ibc/core/channel/v1/query.lcd.d.ts new file mode 100644 index 00000000..f7b3029b --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/query.lcd.d.ts @@ -0,0 +1,21 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryChannelRequest, QueryChannelResponseSDKType, QueryChannelsRequest, QueryChannelsResponseSDKType, QueryConnectionChannelsRequest, QueryConnectionChannelsResponseSDKType, QueryChannelClientStateRequest, QueryChannelClientStateResponseSDKType, QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponseSDKType, QueryPacketCommitmentRequest, QueryPacketCommitmentResponseSDKType, QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponseSDKType, QueryPacketReceiptRequest, QueryPacketReceiptResponseSDKType, QueryPacketAcknowledgementRequest, QueryPacketAcknowledgementResponseSDKType, QueryPacketAcknowledgementsRequest, QueryPacketAcknowledgementsResponseSDKType, QueryUnreceivedPacketsRequest, QueryUnreceivedPacketsResponseSDKType, QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponseSDKType, QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + channel(params: QueryChannelRequest): Promise; + channels(params?: QueryChannelsRequest): Promise; + connectionChannels(params: QueryConnectionChannelsRequest): Promise; + channelClientState(params: QueryChannelClientStateRequest): Promise; + channelConsensusState(params: QueryChannelConsensusStateRequest): Promise; + packetCommitment(params: QueryPacketCommitmentRequest): Promise; + packetCommitments(params: QueryPacketCommitmentsRequest): Promise; + packetReceipt(params: QueryPacketReceiptRequest): Promise; + packetAcknowledgement(params: QueryPacketAcknowledgementRequest): Promise; + packetAcknowledgements(params: QueryPacketAcknowledgementsRequest): Promise; + unreceivedPackets(params: QueryUnreceivedPacketsRequest): Promise; + unreceivedAcks(params: QueryUnreceivedAcksRequest): Promise; + nextSequenceReceive(params: QueryNextSequenceReceiveRequest): Promise; +} diff --git a/packages/codegen/dist/ibc/core/channel/v1/query.rpc.Query.d.ts b/packages/codegen/dist/ibc/core/channel/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..69c372d5 --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/query.rpc.Query.d.ts @@ -0,0 +1,88 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryChannelRequest, QueryChannelResponse, QueryChannelsRequest, QueryChannelsResponse, QueryConnectionChannelsRequest, QueryConnectionChannelsResponse, QueryChannelClientStateRequest, QueryChannelClientStateResponse, QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponse, QueryPacketCommitmentRequest, QueryPacketCommitmentResponse, QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponse, QueryPacketReceiptRequest, QueryPacketReceiptResponse, QueryPacketAcknowledgementRequest, QueryPacketAcknowledgementResponse, QueryPacketAcknowledgementsRequest, QueryPacketAcknowledgementsResponse, QueryUnreceivedPacketsRequest, QueryUnreceivedPacketsResponse, QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponse, QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponse } from "./query"; 
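// Usage sketch (illustrative only, not part of the generated diff): wiring the
// createRpcQueryExtension declared below into a @cosmjs/stargate QueryClient and
// querying a single channel end. The RPC endpoint, port id, channel id, and the
// "./query.rpc.Query" import path are placeholder assumptions; the exact cosmjs
// wiring may differ by version.
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./query.rpc.Query";

async function getTransferChannel(rpcEndpoint: string) {
  const tmClient = await Tendermint34Client.connect(rpcEndpoint);
  // withExtensions accepts setup functions of shape (base: QueryClient) => extension,
  // which matches the createRpcQueryExtension signature declared in this file.
  const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
  // Request fields follow the camelCase shapes declared in ./query (QueryChannelRequest).
  const { channel } = await client.channel({ portId: "transfer", channelId: "channel-0" });
  return channel;
}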
+/** Query provides defines the gRPC querier service */ +export interface Query { + /** Channel queries an IBC Channel. */ + channel(request: QueryChannelRequest): Promise; + /** Channels queries all the IBC channels of a chain. */ + channels(request?: QueryChannelsRequest): Promise; + /** + * ConnectionChannels queries all the channels associated with a connection + * end. + */ + connectionChannels(request: QueryConnectionChannelsRequest): Promise; + /** + * ChannelClientState queries for the client state for the channel associated + * with the provided channel identifiers. + */ + channelClientState(request: QueryChannelClientStateRequest): Promise; + /** + * ChannelConsensusState queries for the consensus state for the channel + * associated with the provided channel identifiers. + */ + channelConsensusState(request: QueryChannelConsensusStateRequest): Promise; + /** PacketCommitment queries a stored packet commitment hash. */ + packetCommitment(request: QueryPacketCommitmentRequest): Promise; + /** + * PacketCommitments returns all the packet commitments hashes associated + * with a channel. + */ + packetCommitments(request: QueryPacketCommitmentsRequest): Promise; + /** + * PacketReceipt queries if a given packet sequence has been received on the + * queried chain + */ + packetReceipt(request: QueryPacketReceiptRequest): Promise; + /** PacketAcknowledgement queries a stored packet acknowledgement hash. */ + packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise; + /** + * PacketAcknowledgements returns all the packet acknowledgements associated + * with a channel. + */ + packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise; + /** + * UnreceivedPackets returns all the unreceived IBC packets associated with a + * channel and sequences. + */ + unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise; + /** + * UnreceivedAcks returns all the unreceived IBC acknowledgements associated + * with a channel and sequences. + */ + unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise; + /** NextSequenceReceive returns the next receive sequence for a given channel. 
*/ + nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + channel(request: QueryChannelRequest): Promise; + channels(request?: QueryChannelsRequest): Promise; + connectionChannels(request: QueryConnectionChannelsRequest): Promise; + channelClientState(request: QueryChannelClientStateRequest): Promise; + channelConsensusState(request: QueryChannelConsensusStateRequest): Promise; + packetCommitment(request: QueryPacketCommitmentRequest): Promise; + packetCommitments(request: QueryPacketCommitmentsRequest): Promise; + packetReceipt(request: QueryPacketReceiptRequest): Promise; + packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise; + packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise; + unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise; + unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise; + nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + channel(request: QueryChannelRequest): Promise; + channels(request?: QueryChannelsRequest): Promise; + connectionChannels(request: QueryConnectionChannelsRequest): Promise; + channelClientState(request: QueryChannelClientStateRequest): Promise; + channelConsensusState(request: QueryChannelConsensusStateRequest): Promise; + packetCommitment(request: QueryPacketCommitmentRequest): Promise; + packetCommitments(request: QueryPacketCommitmentsRequest): Promise; + packetReceipt(request: QueryPacketReceiptRequest): Promise; + packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise; + packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise; + unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise; + unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise; + nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise; +}; diff --git a/packages/codegen/dist/ibc/core/channel/v1/tx.d.ts b/packages/codegen/dist/ibc/core/channel/v1/tx.d.ts new file mode 100644 index 00000000..adb36668 --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/tx.d.ts @@ -0,0 +1,377 @@ +/// +import { Channel, ChannelSDKType, Packet, PacketSDKType } from "./channel"; +import { Height, HeightSDKType } from "../../client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * MsgChannelOpenInit defines an sdk.Msg to initialize a channel handshake. It + * is called by a relayer on Chain A. + */ +export interface MsgChannelOpenInit { + portId: string; + channel?: Channel; + signer: string; +} +/** + * MsgChannelOpenInit defines an sdk.Msg to initialize a channel handshake. It + * is called by a relayer on Chain A. + */ +export interface MsgChannelOpenInitSDKType { + port_id: string; + channel?: ChannelSDKType; + signer: string; +} +/** MsgChannelOpenInitResponse defines the Msg/ChannelOpenInit response type. */ +export interface MsgChannelOpenInitResponse { +} +/** MsgChannelOpenInitResponse defines the Msg/ChannelOpenInit response type. */ +export interface MsgChannelOpenInitResponseSDKType { +} +/** + * MsgChannelOpenInit defines a msg sent by a Relayer to try to open a channel + * on Chain B. 
+ */ +export interface MsgChannelOpenTry { + portId: string; + /** + * in the case of crossing hello's, when both chains call OpenInit, we need + * the channel identifier of the previous channel in state INIT + */ + previousChannelId: string; + channel?: Channel; + counterpartyVersion: string; + proofInit: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenInit defines a msg sent by a Relayer to try to open a channel + * on Chain B. + */ +export interface MsgChannelOpenTrySDKType { + port_id: string; + previous_channel_id: string; + channel?: ChannelSDKType; + counterparty_version: string; + proof_init: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgChannelOpenTryResponse defines the Msg/ChannelOpenTry response type. */ +export interface MsgChannelOpenTryResponse { +} +/** MsgChannelOpenTryResponse defines the Msg/ChannelOpenTry response type. */ +export interface MsgChannelOpenTryResponseSDKType { +} +/** + * MsgChannelOpenAck defines a msg sent by a Relayer to Chain A to acknowledge + * the change of channel state to TRYOPEN on Chain B. + */ +export interface MsgChannelOpenAck { + portId: string; + channelId: string; + counterpartyChannelId: string; + counterpartyVersion: string; + proofTry: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenAck defines a msg sent by a Relayer to Chain A to acknowledge + * the change of channel state to TRYOPEN on Chain B. + */ +export interface MsgChannelOpenAckSDKType { + port_id: string; + channel_id: string; + counterparty_channel_id: string; + counterparty_version: string; + proof_try: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgChannelOpenAckResponse defines the Msg/ChannelOpenAck response type. */ +export interface MsgChannelOpenAckResponse { +} +/** MsgChannelOpenAckResponse defines the Msg/ChannelOpenAck response type. */ +export interface MsgChannelOpenAckResponseSDKType { +} +/** + * MsgChannelOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of channel state to OPEN on Chain A. + */ +export interface MsgChannelOpenConfirm { + portId: string; + channelId: string; + proofAck: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of channel state to OPEN on Chain A. + */ +export interface MsgChannelOpenConfirmSDKType { + port_id: string; + channel_id: string; + proof_ack: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgChannelOpenConfirmResponse defines the Msg/ChannelOpenConfirm response + * type. + */ +export interface MsgChannelOpenConfirmResponse { +} +/** + * MsgChannelOpenConfirmResponse defines the Msg/ChannelOpenConfirm response + * type. + */ +export interface MsgChannelOpenConfirmResponseSDKType { +} +/** + * MsgChannelCloseInit defines a msg sent by a Relayer to Chain A + * to close a channel with Chain B. + */ +export interface MsgChannelCloseInit { + portId: string; + channelId: string; + signer: string; +} +/** + * MsgChannelCloseInit defines a msg sent by a Relayer to Chain A + * to close a channel with Chain B. + */ +export interface MsgChannelCloseInitSDKType { + port_id: string; + channel_id: string; + signer: string; +} +/** MsgChannelCloseInitResponse defines the Msg/ChannelCloseInit response type. */ +export interface MsgChannelCloseInitResponse { +} +/** MsgChannelCloseInitResponse defines the Msg/ChannelCloseInit response type. 
*/ +export interface MsgChannelCloseInitResponseSDKType { +} +/** + * MsgChannelCloseConfirm defines a msg sent by a Relayer to Chain B + * to acknowledge the change of channel state to CLOSED on Chain A. + */ +export interface MsgChannelCloseConfirm { + portId: string; + channelId: string; + proofInit: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelCloseConfirm defines a msg sent by a Relayer to Chain B + * to acknowledge the change of channel state to CLOSED on Chain A. + */ +export interface MsgChannelCloseConfirmSDKType { + port_id: string; + channel_id: string; + proof_init: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgChannelCloseConfirmResponse defines the Msg/ChannelCloseConfirm response + * type. + */ +export interface MsgChannelCloseConfirmResponse { +} +/** + * MsgChannelCloseConfirmResponse defines the Msg/ChannelCloseConfirm response + * type. + */ +export interface MsgChannelCloseConfirmResponseSDKType { +} +/** MsgRecvPacket receives incoming IBC packet */ +export interface MsgRecvPacket { + packet?: Packet; + proofCommitment: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** MsgRecvPacket receives incoming IBC packet */ +export interface MsgRecvPacketSDKType { + packet?: PacketSDKType; + proof_commitment: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgRecvPacketResponse defines the Msg/RecvPacket response type. */ +export interface MsgRecvPacketResponse { +} +/** MsgRecvPacketResponse defines the Msg/RecvPacket response type. */ +export interface MsgRecvPacketResponseSDKType { +} +/** MsgTimeout receives timed-out packet */ +export interface MsgTimeout { + packet?: Packet; + proofUnreceived: Uint8Array; + proofHeight?: Height; + nextSequenceRecv: Long; + signer: string; +} +/** MsgTimeout receives timed-out packet */ +export interface MsgTimeoutSDKType { + packet?: PacketSDKType; + proof_unreceived: Uint8Array; + proof_height?: HeightSDKType; + next_sequence_recv: Long; + signer: string; +} +/** MsgTimeoutResponse defines the Msg/Timeout response type. */ +export interface MsgTimeoutResponse { +} +/** MsgTimeoutResponse defines the Msg/Timeout response type. */ +export interface MsgTimeoutResponseSDKType { +} +/** MsgTimeoutOnClose timed-out packet upon counterparty channel closure. */ +export interface MsgTimeoutOnClose { + packet?: Packet; + proofUnreceived: Uint8Array; + proofClose: Uint8Array; + proofHeight?: Height; + nextSequenceRecv: Long; + signer: string; +} +/** MsgTimeoutOnClose timed-out packet upon counterparty channel closure. */ +export interface MsgTimeoutOnCloseSDKType { + packet?: PacketSDKType; + proof_unreceived: Uint8Array; + proof_close: Uint8Array; + proof_height?: HeightSDKType; + next_sequence_recv: Long; + signer: string; +} +/** MsgTimeoutOnCloseResponse defines the Msg/TimeoutOnClose response type. */ +export interface MsgTimeoutOnCloseResponse { +} +/** MsgTimeoutOnCloseResponse defines the Msg/TimeoutOnClose response type. 
*/ +export interface MsgTimeoutOnCloseResponseSDKType { +} +/** MsgAcknowledgement receives incoming IBC acknowledgement */ +export interface MsgAcknowledgement { + packet?: Packet; + acknowledgement: Uint8Array; + proofAcked: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** MsgAcknowledgement receives incoming IBC acknowledgement */ +export interface MsgAcknowledgementSDKType { + packet?: PacketSDKType; + acknowledgement: Uint8Array; + proof_acked: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgAcknowledgementResponse defines the Msg/Acknowledgement response type. */ +export interface MsgAcknowledgementResponse { +} +/** MsgAcknowledgementResponse defines the Msg/Acknowledgement response type. */ +export interface MsgAcknowledgementResponseSDKType { +} +export declare const MsgChannelOpenInit: { + encode(message: MsgChannelOpenInit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenInit; + fromPartial(object: DeepPartial): MsgChannelOpenInit; +}; +export declare const MsgChannelOpenInitResponse: { + encode(_: MsgChannelOpenInitResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenInitResponse; + fromPartial(_: DeepPartial): MsgChannelOpenInitResponse; +}; +export declare const MsgChannelOpenTry: { + encode(message: MsgChannelOpenTry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenTry; + fromPartial(object: DeepPartial): MsgChannelOpenTry; +}; +export declare const MsgChannelOpenTryResponse: { + encode(_: MsgChannelOpenTryResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenTryResponse; + fromPartial(_: DeepPartial): MsgChannelOpenTryResponse; +}; +export declare const MsgChannelOpenAck: { + encode(message: MsgChannelOpenAck, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenAck; + fromPartial(object: DeepPartial): MsgChannelOpenAck; +}; +export declare const MsgChannelOpenAckResponse: { + encode(_: MsgChannelOpenAckResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenAckResponse; + fromPartial(_: DeepPartial): MsgChannelOpenAckResponse; +}; +export declare const MsgChannelOpenConfirm: { + encode(message: MsgChannelOpenConfirm, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenConfirm; + fromPartial(object: DeepPartial): MsgChannelOpenConfirm; +}; +export declare const MsgChannelOpenConfirmResponse: { + encode(_: MsgChannelOpenConfirmResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenConfirmResponse; + fromPartial(_: DeepPartial): MsgChannelOpenConfirmResponse; +}; +export declare const MsgChannelCloseInit: { + encode(message: MsgChannelCloseInit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseInit; + fromPartial(object: DeepPartial): MsgChannelCloseInit; +}; +export declare const MsgChannelCloseInitResponse: { + encode(_: MsgChannelCloseInitResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseInitResponse; + fromPartial(_: DeepPartial): MsgChannelCloseInitResponse; +}; +export declare const MsgChannelCloseConfirm: { + encode(message: 
MsgChannelCloseConfirm, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseConfirm; + fromPartial(object: DeepPartial): MsgChannelCloseConfirm; +}; +export declare const MsgChannelCloseConfirmResponse: { + encode(_: MsgChannelCloseConfirmResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseConfirmResponse; + fromPartial(_: DeepPartial): MsgChannelCloseConfirmResponse; +}; +export declare const MsgRecvPacket: { + encode(message: MsgRecvPacket, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRecvPacket; + fromPartial(object: DeepPartial): MsgRecvPacket; +}; +export declare const MsgRecvPacketResponse: { + encode(_: MsgRecvPacketResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRecvPacketResponse; + fromPartial(_: DeepPartial): MsgRecvPacketResponse; +}; +export declare const MsgTimeout: { + encode(message: MsgTimeout, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeout; + fromPartial(object: DeepPartial): MsgTimeout; +}; +export declare const MsgTimeoutResponse: { + encode(_: MsgTimeoutResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutResponse; + fromPartial(_: DeepPartial): MsgTimeoutResponse; +}; +export declare const MsgTimeoutOnClose: { + encode(message: MsgTimeoutOnClose, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutOnClose; + fromPartial(object: DeepPartial): MsgTimeoutOnClose; +}; +export declare const MsgTimeoutOnCloseResponse: { + encode(_: MsgTimeoutOnCloseResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutOnCloseResponse; + fromPartial(_: DeepPartial): MsgTimeoutOnCloseResponse; +}; +export declare const MsgAcknowledgement: { + encode(message: MsgAcknowledgement, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAcknowledgement; + fromPartial(object: DeepPartial): MsgAcknowledgement; +}; +export declare const MsgAcknowledgementResponse: { + encode(_: MsgAcknowledgementResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAcknowledgementResponse; + fromPartial(_: DeepPartial): MsgAcknowledgementResponse; +}; diff --git a/packages/codegen/dist/ibc/core/channel/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/ibc/core/channel/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..2125f0e9 --- /dev/null +++ b/packages/codegen/dist/ibc/core/channel/v1/tx.rpc.msg.d.ts @@ -0,0 +1,42 @@ +import { Rpc } from "../../../../helpers"; +import { MsgChannelOpenInit, MsgChannelOpenInitResponse, MsgChannelOpenTry, MsgChannelOpenTryResponse, MsgChannelOpenAck, MsgChannelOpenAckResponse, MsgChannelOpenConfirm, MsgChannelOpenConfirmResponse, MsgChannelCloseInit, MsgChannelCloseInitResponse, MsgChannelCloseConfirm, MsgChannelCloseConfirmResponse, MsgRecvPacket, MsgRecvPacketResponse, MsgTimeout, MsgTimeoutResponse, MsgTimeoutOnClose, MsgTimeoutOnCloseResponse, MsgAcknowledgement, MsgAcknowledgementResponse } from "./tx"; +/** Msg defines the ibc/channel Msg service. */ +export interface Msg { + /** ChannelOpenInit defines a rpc handler method for MsgChannelOpenInit. 
*/ + channelOpenInit(request: MsgChannelOpenInit): Promise; + /** ChannelOpenTry defines a rpc handler method for MsgChannelOpenTry. */ + channelOpenTry(request: MsgChannelOpenTry): Promise; + /** ChannelOpenAck defines a rpc handler method for MsgChannelOpenAck. */ + channelOpenAck(request: MsgChannelOpenAck): Promise; + /** ChannelOpenConfirm defines a rpc handler method for MsgChannelOpenConfirm. */ + channelOpenConfirm(request: MsgChannelOpenConfirm): Promise; + /** ChannelCloseInit defines a rpc handler method for MsgChannelCloseInit. */ + channelCloseInit(request: MsgChannelCloseInit): Promise; + /** + * ChannelCloseConfirm defines a rpc handler method for + * MsgChannelCloseConfirm. + */ + channelCloseConfirm(request: MsgChannelCloseConfirm): Promise; + /** RecvPacket defines a rpc handler method for MsgRecvPacket. */ + recvPacket(request: MsgRecvPacket): Promise; + /** Timeout defines a rpc handler method for MsgTimeout. */ + timeout(request: MsgTimeout): Promise; + /** TimeoutOnClose defines a rpc handler method for MsgTimeoutOnClose. */ + timeoutOnClose(request: MsgTimeoutOnClose): Promise; + /** Acknowledgement defines a rpc handler method for MsgAcknowledgement. */ + acknowledgement(request: MsgAcknowledgement): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + channelOpenInit(request: MsgChannelOpenInit): Promise; + channelOpenTry(request: MsgChannelOpenTry): Promise; + channelOpenAck(request: MsgChannelOpenAck): Promise; + channelOpenConfirm(request: MsgChannelOpenConfirm): Promise; + channelCloseInit(request: MsgChannelCloseInit): Promise; + channelCloseConfirm(request: MsgChannelCloseConfirm): Promise; + recvPacket(request: MsgRecvPacket): Promise; + timeout(request: MsgTimeout): Promise; + timeoutOnClose(request: MsgTimeoutOnClose): Promise; + acknowledgement(request: MsgAcknowledgement): Promise; +} diff --git a/packages/codegen/dist/ibc/core/client/v1/client.d.ts b/packages/codegen/dist/ibc/core/client/v1/client.d.ts new file mode 100644 index 00000000..63d0e24f --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/client.d.ts @@ -0,0 +1,196 @@ +/// +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Plan, PlanSDKType } from "../../../../cosmos/upgrade/v1beta1/upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ +export interface IdentifiedClientState { + /** client identifier */ + clientId: string; + /** client state */ + clientState?: Any; +} +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ +export interface IdentifiedClientStateSDKType { + client_id: string; + client_state?: AnySDKType; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ +export interface ConsensusStateWithHeight { + /** consensus state height */ + height?: Height; + /** consensus state */ + consensusState?: Any; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ +export interface ConsensusStateWithHeightSDKType { + height?: HeightSDKType; + consensus_state?: AnySDKType; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. 
+ */ +export interface ClientConsensusStates { + /** client identifier */ + clientId: string; + /** consensus states and their heights associated with the client */ + consensusStates: ConsensusStateWithHeight[]; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. + */ +export interface ClientConsensusStatesSDKType { + client_id: string; + consensus_states: ConsensusStateWithHeightSDKType[]; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ +export interface ClientUpdateProposal { + /** the title of the update proposal */ + title: string; + /** the description of the proposal */ + description: string; + /** the client identifier for the client to be updated if the proposal passes */ + subjectClientId: string; + /** + * the substitute client identifier for the client standing in for the subject + * client + */ + substituteClientId: string; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ +export interface ClientUpdateProposalSDKType { + title: string; + description: string; + subject_client_id: string; + substitute_client_id: string; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ +export interface UpgradeProposal { + title: string; + description: string; + plan?: Plan; + /** + * An UpgradedClientState must be provided to perform an IBC breaking upgrade. + * This will make the chain commit to the correct upgraded (self) client state + * before the upgrade occurs, so that connecting chains can verify that the + * new upgraded client is valid by verifying a proof on the previous version + * of the chain. This will allow IBC connections to persist smoothly across + * planned chain upgrades + */ + upgradedClientState?: Any; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ +export interface UpgradeProposalSDKType { + title: string; + description: string; + plan?: PlanSDKType; + upgraded_client_state?: AnySDKType; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. 
hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ +export interface Height { + /** the revision that the client is currently on */ + revisionNumber: Long; + /** the height within the given revision */ + revisionHeight: Long; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ +export interface HeightSDKType { + revision_number: Long; + revision_height: Long; +} +/** Params defines the set of IBC light client parameters. */ +export interface Params { + /** allowed_clients defines the list of allowed client state types. */ + allowedClients: string[]; +} +/** Params defines the set of IBC light client parameters. */ +export interface ParamsSDKType { + allowed_clients: string[]; +} +export declare const IdentifiedClientState: { + encode(message: IdentifiedClientState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedClientState; + fromPartial(object: DeepPartial): IdentifiedClientState; +}; +export declare const ConsensusStateWithHeight: { + encode(message: ConsensusStateWithHeight, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateWithHeight; + fromPartial(object: DeepPartial): ConsensusStateWithHeight; +}; +export declare const ClientConsensusStates: { + encode(message: ClientConsensusStates, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientConsensusStates; + fromPartial(object: DeepPartial): ClientConsensusStates; +}; +export declare const ClientUpdateProposal: { + encode(message: ClientUpdateProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientUpdateProposal; + fromPartial(object: DeepPartial): ClientUpdateProposal; +}; +export declare const UpgradeProposal: { + encode(message: UpgradeProposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): UpgradeProposal; + fromPartial(object: DeepPartial): UpgradeProposal; +}; +export declare const Height: { + encode(message: Height, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Height; + fromPartial(object: DeepPartial): Height; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; diff --git a/packages/codegen/dist/ibc/core/client/v1/genesis.d.ts b/packages/codegen/dist/ibc/core/client/v1/genesis.d.ts new file mode 100644 index 00000000..52aac222 --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/genesis.d.ts @@ -0,0 +1,76 @@ +/// +import { IdentifiedClientState, IdentifiedClientStateSDKType, ClientConsensusStates, ClientConsensusStatesSDKType, Params, ParamsSDKType } from "./client"; +import { Long, DeepPartial } from 
"../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc client submodule's genesis state. */ +export interface GenesisState { + /** client states with their corresponding identifiers */ + clients: IdentifiedClientState[]; + /** consensus states from each client */ + clientsConsensus: ClientConsensusStates[]; + /** metadata from each client */ + clientsMetadata: IdentifiedGenesisMetadata[]; + params?: Params; + /** create localhost on initialization */ + createLocalhost: boolean; + /** the sequence for the next generated client identifier */ + nextClientSequence: Long; +} +/** GenesisState defines the ibc client submodule's genesis state. */ +export interface GenesisStateSDKType { + clients: IdentifiedClientStateSDKType[]; + clients_consensus: ClientConsensusStatesSDKType[]; + clients_metadata: IdentifiedGenesisMetadataSDKType[]; + params?: ParamsSDKType; + create_localhost: boolean; + next_client_sequence: Long; +} +/** + * GenesisMetadata defines the genesis type for metadata that clients may return + * with ExportMetadata + */ +export interface GenesisMetadata { + /** store key of metadata without clientID-prefix */ + key: Uint8Array; + /** metadata value */ + value: Uint8Array; +} +/** + * GenesisMetadata defines the genesis type for metadata that clients may return + * with ExportMetadata + */ +export interface GenesisMetadataSDKType { + key: Uint8Array; + value: Uint8Array; +} +/** + * IdentifiedGenesisMetadata has the client metadata with the corresponding + * client id. + */ +export interface IdentifiedGenesisMetadata { + clientId: string; + clientMetadata: GenesisMetadata[]; +} +/** + * IdentifiedGenesisMetadata has the client metadata with the corresponding + * client id. + */ +export interface IdentifiedGenesisMetadataSDKType { + client_id: string; + client_metadata: GenesisMetadataSDKType[]; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; +export declare const GenesisMetadata: { + encode(message: GenesisMetadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisMetadata; + fromPartial(object: DeepPartial): GenesisMetadata; +}; +export declare const IdentifiedGenesisMetadata: { + encode(message: IdentifiedGenesisMetadata, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedGenesisMetadata; + fromPartial(object: DeepPartial): IdentifiedGenesisMetadata; +}; diff --git a/packages/codegen/dist/ibc/core/client/v1/query.d.ts b/packages/codegen/dist/ibc/core/client/v1/query.d.ts new file mode 100644 index 00000000..746ff675 --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/query.d.ts @@ -0,0 +1,353 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType, ConsensusStateWithHeight, ConsensusStateWithHeightSDKType, Params, ParamsSDKType } from "./client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * QueryClientStateRequest is the request type for the Query/ClientState RPC + * method + */ +export interface QueryClientStateRequest { + /** client state 
unique identifier */ + clientId: string; +} +/** + * QueryClientStateRequest is the request type for the Query/ClientState RPC + * method + */ +export interface QueryClientStateRequestSDKType { + client_id: string; +} +/** + * QueryClientStateResponse is the response type for the Query/ClientState RPC + * method. Besides the client state, it includes a proof and the height from + * which the proof was retrieved. + */ +export interface QueryClientStateResponse { + /** client state associated with the request identifier */ + clientState?: Any; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryClientStateResponse is the response type for the Query/ClientState RPC + * method. Besides the client state, it includes a proof and the height from + * which the proof was retrieved. + */ +export interface QueryClientStateResponseSDKType { + client_state?: AnySDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryClientStatesRequest is the request type for the Query/ClientStates RPC + * method + */ +export interface QueryClientStatesRequest { + /** pagination request */ + pagination?: PageRequest; +} +/** + * QueryClientStatesRequest is the request type for the Query/ClientStates RPC + * method + */ +export interface QueryClientStatesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryClientStatesResponse is the response type for the Query/ClientStates RPC + * method. + */ +export interface QueryClientStatesResponse { + /** list of stored ClientStates of the chain. */ + clientStates: IdentifiedClientState[]; + /** pagination response */ + pagination?: PageResponse; +} +/** + * QueryClientStatesResponse is the response type for the Query/ClientStates RPC + * method. + */ +export interface QueryClientStatesResponseSDKType { + client_states: IdentifiedClientStateSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryConsensusStateRequest is the request type for the Query/ConsensusState + * RPC method. Besides the consensus state, it includes a proof and the height + * from which the proof was retrieved. + */ +export interface QueryConsensusStateRequest { + /** client identifier */ + clientId: string; + /** consensus state revision number */ + revisionNumber: Long; + /** consensus state revision height */ + revisionHeight: Long; + /** + * latest_height overrrides the height field and queries the latest stored + * ConsensusState + */ + latestHeight: boolean; +} +/** + * QueryConsensusStateRequest is the request type for the Query/ConsensusState + * RPC method. Besides the consensus state, it includes a proof and the height + * from which the proof was retrieved. 
+ */ +export interface QueryConsensusStateRequestSDKType { + client_id: string; + revision_number: Long; + revision_height: Long; + latest_height: boolean; +} +/** + * QueryConsensusStateResponse is the response type for the Query/ConsensusState + * RPC method + */ +export interface QueryConsensusStateResponse { + /** consensus state associated with the client identifier at the given height */ + consensusState?: Any; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryConsensusStateResponse is the response type for the Query/ConsensusState + * RPC method + */ +export interface QueryConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConsensusStatesRequest is the request type for the Query/ConsensusStates + * RPC method. + */ +export interface QueryConsensusStatesRequest { + /** client identifier */ + clientId: string; + /** pagination request */ + pagination?: PageRequest; +} +/** + * QueryConsensusStatesRequest is the request type for the Query/ConsensusStates + * RPC method. + */ +export interface QueryConsensusStatesRequestSDKType { + client_id: string; + pagination?: PageRequestSDKType; +} +/** + * QueryConsensusStatesResponse is the response type for the + * Query/ConsensusStates RPC method + */ +export interface QueryConsensusStatesResponse { + /** consensus states associated with the identifier */ + consensusStates: ConsensusStateWithHeight[]; + /** pagination response */ + pagination?: PageResponse; +} +/** + * QueryConsensusStatesResponse is the response type for the + * Query/ConsensusStates RPC method + */ +export interface QueryConsensusStatesResponseSDKType { + consensus_states: ConsensusStateWithHeightSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryClientStatusRequest is the request type for the Query/ClientStatus RPC + * method + */ +export interface QueryClientStatusRequest { + /** client unique identifier */ + clientId: string; +} +/** + * QueryClientStatusRequest is the request type for the Query/ClientStatus RPC + * method + */ +export interface QueryClientStatusRequestSDKType { + client_id: string; +} +/** + * QueryClientStatusResponse is the response type for the Query/ClientStatus RPC + * method. It returns the current status of the IBC client. + */ +export interface QueryClientStatusResponse { + status: string; +} +/** + * QueryClientStatusResponse is the response type for the Query/ClientStatus RPC + * method. It returns the current status of the IBC client. + */ +export interface QueryClientStatusResponseSDKType { + status: string; +} +/** + * QueryClientParamsRequest is the request type for the Query/ClientParams RPC + * method. + */ +export interface QueryClientParamsRequest { +} +/** + * QueryClientParamsRequest is the request type for the Query/ClientParams RPC + * method. + */ +export interface QueryClientParamsRequestSDKType { +} +/** + * QueryClientParamsResponse is the response type for the Query/ClientParams RPC + * method. + */ +export interface QueryClientParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** + * QueryClientParamsResponse is the response type for the Query/ClientParams RPC + * method. 
+ */ +export interface QueryClientParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryUpgradedClientStateRequest is the request type for the + * Query/UpgradedClientState RPC method + */ +export interface QueryUpgradedClientStateRequest { +} +/** + * QueryUpgradedClientStateRequest is the request type for the + * Query/UpgradedClientState RPC method + */ +export interface QueryUpgradedClientStateRequestSDKType { +} +/** + * QueryUpgradedClientStateResponse is the response type for the + * Query/UpgradedClientState RPC method. + */ +export interface QueryUpgradedClientStateResponse { + /** client state associated with the request identifier */ + upgradedClientState?: Any; +} +/** + * QueryUpgradedClientStateResponse is the response type for the + * Query/UpgradedClientState RPC method. + */ +export interface QueryUpgradedClientStateResponseSDKType { + upgraded_client_state?: AnySDKType; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the + * Query/UpgradedConsensusState RPC method + */ +export interface QueryUpgradedConsensusStateRequest { +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the + * Query/UpgradedConsensusState RPC method + */ +export interface QueryUpgradedConsensusStateRequestSDKType { +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the + * Query/UpgradedConsensusState RPC method. + */ +export interface QueryUpgradedConsensusStateResponse { + /** Consensus state associated with the request identifier */ + upgradedConsensusState?: Any; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the + * Query/UpgradedConsensusState RPC method. + */ +export interface QueryUpgradedConsensusStateResponseSDKType { + upgraded_consensus_state?: AnySDKType; +} +export declare const QueryClientStateRequest: { + encode(message: QueryClientStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStateRequest; + fromPartial(object: DeepPartial): QueryClientStateRequest; +}; +export declare const QueryClientStateResponse: { + encode(message: QueryClientStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStateResponse; + fromPartial(object: DeepPartial): QueryClientStateResponse; +}; +export declare const QueryClientStatesRequest: { + encode(message: QueryClientStatesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatesRequest; + fromPartial(object: DeepPartial): QueryClientStatesRequest; +}; +export declare const QueryClientStatesResponse: { + encode(message: QueryClientStatesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatesResponse; + fromPartial(object: DeepPartial): QueryClientStatesResponse; +}; +export declare const QueryConsensusStateRequest: { + encode(message: QueryConsensusStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStateRequest; + fromPartial(object: DeepPartial): QueryConsensusStateRequest; +}; +export declare const QueryConsensusStateResponse: { + encode(message: QueryConsensusStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStateResponse; + fromPartial(object: DeepPartial): QueryConsensusStateResponse; +}; +export declare const QueryConsensusStatesRequest: { + 
encode(message: QueryConsensusStatesRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStatesRequest; + fromPartial(object: DeepPartial): QueryConsensusStatesRequest; +}; +export declare const QueryConsensusStatesResponse: { + encode(message: QueryConsensusStatesResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStatesResponse; + fromPartial(object: DeepPartial): QueryConsensusStatesResponse; +}; +export declare const QueryClientStatusRequest: { + encode(message: QueryClientStatusRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatusRequest; + fromPartial(object: DeepPartial): QueryClientStatusRequest; +}; +export declare const QueryClientStatusResponse: { + encode(message: QueryClientStatusResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatusResponse; + fromPartial(object: DeepPartial): QueryClientStatusResponse; +}; +export declare const QueryClientParamsRequest: { + encode(_: QueryClientParamsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientParamsRequest; + fromPartial(_: DeepPartial): QueryClientParamsRequest; +}; +export declare const QueryClientParamsResponse: { + encode(message: QueryClientParamsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientParamsResponse; + fromPartial(object: DeepPartial): QueryClientParamsResponse; +}; +export declare const QueryUpgradedClientStateRequest: { + encode(_: QueryUpgradedClientStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedClientStateRequest; + fromPartial(_: DeepPartial): QueryUpgradedClientStateRequest; +}; +export declare const QueryUpgradedClientStateResponse: { + encode(message: QueryUpgradedClientStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedClientStateResponse; + fromPartial(object: DeepPartial): QueryUpgradedClientStateResponse; +}; +export declare const QueryUpgradedConsensusStateRequest: { + encode(_: QueryUpgradedConsensusStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateRequest; + fromPartial(_: DeepPartial): QueryUpgradedConsensusStateRequest; +}; +export declare const QueryUpgradedConsensusStateResponse: { + encode(message: QueryUpgradedConsensusStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateResponse; + fromPartial(object: DeepPartial): QueryUpgradedConsensusStateResponse; +}; diff --git a/packages/codegen/dist/ibc/core/client/v1/query.lcd.d.ts b/packages/codegen/dist/ibc/core/client/v1/query.lcd.d.ts new file mode 100644 index 00000000..87e88d3b --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/query.lcd.d.ts @@ -0,0 +1,16 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryClientStateRequest, QueryClientStateResponseSDKType, QueryClientStatesRequest, QueryClientStatesResponseSDKType, QueryConsensusStateRequest, QueryConsensusStateResponseSDKType, QueryConsensusStatesRequest, QueryConsensusStatesResponseSDKType, QueryClientStatusRequest, QueryClientStatusResponseSDKType, QueryClientParamsRequest, 
QueryClientParamsResponseSDKType, QueryUpgradedClientStateRequest, QueryUpgradedClientStateResponseSDKType, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + clientState(params: QueryClientStateRequest): Promise<QueryClientStateResponseSDKType>; + clientStates(params?: QueryClientStatesRequest): Promise<QueryClientStatesResponseSDKType>; + consensusState(params: QueryConsensusStateRequest): Promise<QueryConsensusStateResponseSDKType>; + consensusStates(params: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponseSDKType>; + clientStatus(params: QueryClientStatusRequest): Promise<QueryClientStatusResponseSDKType>; + clientParams(_params?: QueryClientParamsRequest): Promise<QueryClientParamsResponseSDKType>; + upgradedClientState(_params?: QueryUpgradedClientStateRequest): Promise<QueryUpgradedClientStateResponseSDKType>; + upgradedConsensusState(_params?: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponseSDKType>; +} diff --git a/packages/codegen/dist/ibc/core/client/v1/query.rpc.Query.d.ts b/packages/codegen/dist/ibc/core/client/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..ae264ca1 --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/query.rpc.Query.d.ts @@ -0,0 +1,50 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryClientStateRequest, QueryClientStateResponse, QueryClientStatesRequest, QueryClientStatesResponse, QueryConsensusStateRequest, QueryConsensusStateResponse, QueryConsensusStatesRequest, QueryConsensusStatesResponse, QueryClientStatusRequest, QueryClientStatusResponse, QueryClientParamsRequest, QueryClientParamsResponse, QueryUpgradedClientStateRequest, QueryUpgradedClientStateResponse, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponse } from "./query"; +/** Query provides defines the gRPC querier service */ +export interface Query { + /** ClientState queries an IBC light client. */ + clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse>; + /** ClientStates queries all the IBC light clients of a chain. */ + clientStates(request?: QueryClientStatesRequest): Promise<QueryClientStatesResponse>; + /** + * ConsensusState queries a consensus state associated with a client state at + * a given height. + */ + consensusState(request: QueryConsensusStateRequest): Promise<QueryConsensusStateResponse>; + /** + * ConsensusStates queries all the consensus state associated with a given + * client. + */ + consensusStates(request: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponse>; + /** Status queries the status of an IBC client. */ + clientStatus(request: QueryClientStatusRequest): Promise<QueryClientStatusResponse>; + /** ClientParams queries all parameters of the ibc client. */ + clientParams(request?: QueryClientParamsRequest): Promise<QueryClientParamsResponse>; + /** UpgradedClientState queries an Upgraded IBC light client. */ + upgradedClientState(request?: QueryUpgradedClientStateRequest): Promise<QueryUpgradedClientStateResponse>; + /** UpgradedConsensusState queries an Upgraded IBC consensus state.
*/ + upgradedConsensusState(request?: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse>; + clientStates(request?: QueryClientStatesRequest): Promise<QueryClientStatesResponse>; + consensusState(request: QueryConsensusStateRequest): Promise<QueryConsensusStateResponse>; + consensusStates(request: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponse>; + clientStatus(request: QueryClientStatusRequest): Promise<QueryClientStatusResponse>; + clientParams(request?: QueryClientParamsRequest): Promise<QueryClientParamsResponse>; + upgradedClientState(request?: QueryUpgradedClientStateRequest): Promise<QueryUpgradedClientStateResponse>; + upgradedConsensusState(request?: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse>; + clientStates(request?: QueryClientStatesRequest): Promise<QueryClientStatesResponse>; + consensusState(request: QueryConsensusStateRequest): Promise<QueryConsensusStateResponse>; + consensusStates(request: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponse>; + clientStatus(request: QueryClientStatusRequest): Promise<QueryClientStatusResponse>; + clientParams(request?: QueryClientParamsRequest): Promise<QueryClientParamsResponse>; + upgradedClientState(request?: QueryUpgradedClientStateRequest): Promise<QueryUpgradedClientStateResponse>; + upgradedConsensusState(request?: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse>; +}; diff --git a/packages/codegen/dist/ibc/core/client/v1/tx.d.ts b/packages/codegen/dist/ibc/core/client/v1/tx.d.ts new file mode 100644 index 00000000..b4cf8495 --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/tx.d.ts @@ -0,0 +1,166 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** MsgCreateClient defines a message to create an IBC client */ +export interface MsgCreateClient { + /** light client state */ + clientState?: Any; + /** + * consensus state associated with the client that corresponds to a given + * height. + */ + consensusState?: Any; + /** signer address */ + signer: string; +} +/** MsgCreateClient defines a message to create an IBC client */ +export interface MsgCreateClientSDKType { + client_state?: AnySDKType; + consensus_state?: AnySDKType; + signer: string; +} +/** MsgCreateClientResponse defines the Msg/CreateClient response type. */ +export interface MsgCreateClientResponse { +} +/** MsgCreateClientResponse defines the Msg/CreateClient response type. */ +export interface MsgCreateClientResponseSDKType { +} +/** + * MsgUpdateClient defines an sdk.Msg to update a IBC client state using + * the given header. + */ +export interface MsgUpdateClient { + /** client unique identifier */ + clientId: string; + /** header to update the light client */ + header?: Any; + /** signer address */ + signer: string; +} +/** + * MsgUpdateClient defines an sdk.Msg to update a IBC client state using + * the given header. + */ +export interface MsgUpdateClientSDKType { + client_id: string; + header?: AnySDKType; + signer: string; +} +/** MsgUpdateClientResponse defines the Msg/UpdateClient response type. */ +export interface MsgUpdateClientResponse { +} +/** MsgUpdateClientResponse defines the Msg/UpdateClient response type.
*/ +export interface MsgUpdateClientResponseSDKType { +} +/** + * MsgUpgradeClient defines an sdk.Msg to upgrade an IBC client to a new client + * state + */ +export interface MsgUpgradeClient { + /** client unique identifier */ + clientId: string; + /** upgraded client state */ + clientState?: Any; + /** + * upgraded consensus state, only contains enough information to serve as a + * basis of trust in update logic + */ + consensusState?: Any; + /** proof that old chain committed to new client */ + proofUpgradeClient: Uint8Array; + /** proof that old chain committed to new consensus state */ + proofUpgradeConsensusState: Uint8Array; + /** signer address */ + signer: string; +} +/** + * MsgUpgradeClient defines an sdk.Msg to upgrade an IBC client to a new client + * state + */ +export interface MsgUpgradeClientSDKType { + client_id: string; + client_state?: AnySDKType; + consensus_state?: AnySDKType; + proof_upgrade_client: Uint8Array; + proof_upgrade_consensus_state: Uint8Array; + signer: string; +} +/** MsgUpgradeClientResponse defines the Msg/UpgradeClient response type. */ +export interface MsgUpgradeClientResponse { +} +/** MsgUpgradeClientResponse defines the Msg/UpgradeClient response type. */ +export interface MsgUpgradeClientResponseSDKType { +} +/** + * MsgSubmitMisbehaviour defines an sdk.Msg type that submits Evidence for + * light client misbehaviour. + */ +export interface MsgSubmitMisbehaviour { + /** client unique identifier */ + clientId: string; + /** misbehaviour used for freezing the light client */ + misbehaviour?: Any; + /** signer address */ + signer: string; +} +/** + * MsgSubmitMisbehaviour defines an sdk.Msg type that submits Evidence for + * light client misbehaviour. + */ +export interface MsgSubmitMisbehaviourSDKType { + client_id: string; + misbehaviour?: AnySDKType; + signer: string; +} +/** + * MsgSubmitMisbehaviourResponse defines the Msg/SubmitMisbehaviour response + * type. + */ +export interface MsgSubmitMisbehaviourResponse { +} +/** + * MsgSubmitMisbehaviourResponse defines the Msg/SubmitMisbehaviour response + * type. 
+ */ +export interface MsgSubmitMisbehaviourResponseSDKType { +} +export declare const MsgCreateClient: { + encode(message: MsgCreateClient, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateClient; + fromPartial(object: DeepPartial<MsgCreateClient>): MsgCreateClient; +}; +export declare const MsgCreateClientResponse: { + encode(_: MsgCreateClientResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateClientResponse; + fromPartial(_: DeepPartial<MsgCreateClientResponse>): MsgCreateClientResponse; +}; +export declare const MsgUpdateClient: { + encode(message: MsgUpdateClient, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateClient; + fromPartial(object: DeepPartial<MsgUpdateClient>): MsgUpdateClient; +}; +export declare const MsgUpdateClientResponse: { + encode(_: MsgUpdateClientResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateClientResponse; + fromPartial(_: DeepPartial<MsgUpdateClientResponse>): MsgUpdateClientResponse; +}; +export declare const MsgUpgradeClient: { + encode(message: MsgUpgradeClient, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeClient; + fromPartial(object: DeepPartial<MsgUpgradeClient>): MsgUpgradeClient; +}; +export declare const MsgUpgradeClientResponse: { + encode(_: MsgUpgradeClientResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeClientResponse; + fromPartial(_: DeepPartial<MsgUpgradeClientResponse>): MsgUpgradeClientResponse; +}; +export declare const MsgSubmitMisbehaviour: { + encode(message: MsgSubmitMisbehaviour, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitMisbehaviour; + fromPartial(object: DeepPartial<MsgSubmitMisbehaviour>): MsgSubmitMisbehaviour; +}; +export declare const MsgSubmitMisbehaviourResponse: { + encode(_: MsgSubmitMisbehaviourResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitMisbehaviourResponse; + fromPartial(_: DeepPartial<MsgSubmitMisbehaviourResponse>): MsgSubmitMisbehaviourResponse; +}; diff --git a/packages/codegen/dist/ibc/core/client/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/ibc/core/client/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..d49d180d --- /dev/null +++ b/packages/codegen/dist/ibc/core/client/v1/tx.rpc.msg.d.ts @@ -0,0 +1,21 @@ +import { Rpc } from "../../../../helpers"; +import { MsgCreateClient, MsgCreateClientResponse, MsgUpdateClient, MsgUpdateClientResponse, MsgUpgradeClient, MsgUpgradeClientResponse, MsgSubmitMisbehaviour, MsgSubmitMisbehaviourResponse } from "./tx"; +/** Msg defines the ibc/client Msg service. */ +export interface Msg { + /** CreateClient defines a rpc handler method for MsgCreateClient. */ + createClient(request: MsgCreateClient): Promise<MsgCreateClientResponse>; + /** UpdateClient defines a rpc handler method for MsgUpdateClient. */ + updateClient(request: MsgUpdateClient): Promise<MsgUpdateClientResponse>; + /** UpgradeClient defines a rpc handler method for MsgUpgradeClient. */ + upgradeClient(request: MsgUpgradeClient): Promise<MsgUpgradeClientResponse>; + /** SubmitMisbehaviour defines a rpc handler method for MsgSubmitMisbehaviour.
*/ + submitMisbehaviour(request: MsgSubmitMisbehaviour): Promise<MsgSubmitMisbehaviourResponse>; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + createClient(request: MsgCreateClient): Promise<MsgCreateClientResponse>; + updateClient(request: MsgUpdateClient): Promise<MsgUpdateClientResponse>; + upgradeClient(request: MsgUpgradeClient): Promise<MsgUpgradeClientResponse>; + submitMisbehaviour(request: MsgSubmitMisbehaviour): Promise<MsgSubmitMisbehaviourResponse>; +} diff --git a/packages/codegen/dist/ibc/core/commitment/v1/commitment.d.ts b/packages/codegen/dist/ibc/core/commitment/v1/commitment.d.ts new file mode 100644 index 00000000..35419c6d --- /dev/null +++ b/packages/codegen/dist/ibc/core/commitment/v1/commitment.d.ts @@ -0,0 +1,89 @@ +import { CommitmentProof, CommitmentProofSDKType } from "../../../../confio/proofs"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * MerkleRoot defines a merkle root hash. + * In the Cosmos SDK, the AppHash of a block header becomes the root. + */ +export interface MerkleRoot { + hash: Uint8Array; +} +/** + * MerkleRoot defines a merkle root hash. + * In the Cosmos SDK, the AppHash of a block header becomes the root. + */ +export interface MerkleRootSDKType { + hash: Uint8Array; +} +/** + * MerklePrefix is merkle path prefixed to the key. + * The constructed key from the Path and the key will be append(Path.KeyPath, + * append(Path.KeyPrefix, key...)) + */ +export interface MerklePrefix { + keyPrefix: Uint8Array; +} +/** + * MerklePrefix is merkle path prefixed to the key. + * The constructed key from the Path and the key will be append(Path.KeyPath, + * append(Path.KeyPrefix, key...)) + */ +export interface MerklePrefixSDKType { + key_prefix: Uint8Array; +} +/** + * MerklePath is the path used to verify commitment proofs, which can be an + * arbitrary structured object (defined by a commitment type). + * MerklePath is represented from root-to-leaf + */ +export interface MerklePath { + keyPath: string[]; +} +/** + * MerklePath is the path used to verify commitment proofs, which can be an + * arbitrary structured object (defined by a commitment type). + * MerklePath is represented from root-to-leaf + */ +export interface MerklePathSDKType { + key_path: string[]; +} +/** + * MerkleProof is a wrapper type over a chain of CommitmentProofs. + * It demonstrates membership or non-membership for an element or set of + * elements, verifiable in conjunction with a known commitment root. Proofs + * should be succinct. + * MerkleProofs are ordered from leaf-to-root + */ +export interface MerkleProof { + proofs: CommitmentProof[]; +} +/** + * MerkleProof is a wrapper type over a chain of CommitmentProofs. + * It demonstrates membership or non-membership for an element or set of + * elements, verifiable in conjunction with a known commitment root. Proofs + * should be succinct.
+ * MerkleProofs are ordered from leaf-to-root + */ +export interface MerkleProofSDKType { + proofs: CommitmentProofSDKType[]; +} +export declare const MerkleRoot: { + encode(message: MerkleRoot, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MerkleRoot; + fromPartial(object: DeepPartial): MerkleRoot; +}; +export declare const MerklePrefix: { + encode(message: MerklePrefix, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MerklePrefix; + fromPartial(object: DeepPartial): MerklePrefix; +}; +export declare const MerklePath: { + encode(message: MerklePath, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MerklePath; + fromPartial(object: DeepPartial): MerklePath; +}; +export declare const MerkleProof: { + encode(message: MerkleProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MerkleProof; + fromPartial(object: DeepPartial): MerkleProof; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/connection.d.ts b/packages/codegen/dist/ibc/core/connection/v1/connection.d.ts new file mode 100644 index 00000000..3e78a800 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/connection.d.ts @@ -0,0 +1,204 @@ +/// +import { MerklePrefix, MerklePrefixSDKType } from "../../commitment/v1/commitment"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * State defines if a connection is in one of the following states: + * INIT, TRYOPEN, OPEN or UNINITIALIZED. + */ +export declare enum State { + /** STATE_UNINITIALIZED_UNSPECIFIED - Default State */ + STATE_UNINITIALIZED_UNSPECIFIED = 0, + /** STATE_INIT - A connection end has just started the opening handshake. */ + STATE_INIT = 1, + /** + * STATE_TRYOPEN - A connection end has acknowledged the handshake step on the counterparty + * chain. + */ + STATE_TRYOPEN = 2, + /** STATE_OPEN - A connection end has completed the handshake. */ + STATE_OPEN = 3, + UNRECOGNIZED = -1 +} +export declare const StateSDKType: typeof State; +export declare function stateFromJSON(object: any): State; +export declare function stateToJSON(object: State): string; +/** + * ConnectionEnd defines a stateful object on a chain connected to another + * separate one. + * NOTE: there must only be 2 defined ConnectionEnds to establish + * a connection between two chains. + */ +export interface ConnectionEnd { + /** client associated with this connection. */ + clientId: string; + /** + * IBC version which can be utilised to determine encodings or protocols for + * channels or packets utilising this connection. + */ + versions: Version[]; + /** current state of the connection end. */ + state: State; + /** counterparty chain associated with this connection. */ + counterparty?: Counterparty; + /** + * delay period that must pass before a consensus state can be used for + * packet-verification NOTE: delay period logic is only implemented by some + * clients. + */ + delayPeriod: Long; +} +/** + * ConnectionEnd defines a stateful object on a chain connected to another + * separate one. + * NOTE: there must only be 2 defined ConnectionEnds to establish + * a connection between two chains. 
+ */ +export interface ConnectionEndSDKType { + client_id: string; + versions: VersionSDKType[]; + state: State; + counterparty?: CounterpartySDKType; + delay_period: Long; +} +/** + * IdentifiedConnection defines a connection with additional connection + * identifier field. + */ +export interface IdentifiedConnection { + /** connection identifier. */ + id: string; + /** client associated with this connection. */ + clientId: string; + /** + * IBC version which can be utilised to determine encodings or protocols for + * channels or packets utilising this connection + */ + versions: Version[]; + /** current state of the connection end. */ + state: State; + /** counterparty chain associated with this connection. */ + counterparty?: Counterparty; + /** delay period associated with this connection. */ + delayPeriod: Long; +} +/** + * IdentifiedConnection defines a connection with additional connection + * identifier field. + */ +export interface IdentifiedConnectionSDKType { + id: string; + client_id: string; + versions: VersionSDKType[]; + state: State; + counterparty?: CounterpartySDKType; + delay_period: Long; +} +/** Counterparty defines the counterparty chain associated with a connection end. */ +export interface Counterparty { + /** + * identifies the client on the counterparty chain associated with a given + * connection. + */ + clientId: string; + /** + * identifies the connection end on the counterparty chain associated with a + * given connection. + */ + connectionId: string; + /** commitment merkle prefix of the counterparty chain. */ + prefix?: MerklePrefix; +} +/** Counterparty defines the counterparty chain associated with a connection end. */ +export interface CounterpartySDKType { + client_id: string; + connection_id: string; + prefix?: MerklePrefixSDKType; +} +/** ClientPaths define all the connection paths for a client state. */ +export interface ClientPaths { + /** list of connection paths */ + paths: string[]; +} +/** ClientPaths define all the connection paths for a client state. */ +export interface ClientPathsSDKType { + paths: string[]; +} +/** ConnectionPaths define all the connection paths for a given client state. */ +export interface ConnectionPaths { + /** client state unique identifier */ + clientId: string; + /** list of connection paths */ + paths: string[]; +} +/** ConnectionPaths define all the connection paths for a given client state. */ +export interface ConnectionPathsSDKType { + client_id: string; + paths: string[]; +} +/** + * Version defines the versioning scheme used to negotiate the IBC verison in + * the connection handshake. + */ +export interface Version { + /** unique version identifier */ + identifier: string; + /** list of features compatible with the specified identifier */ + features: string[]; +} +/** + * Version defines the versioning scheme used to negotiate the IBC verison in + * the connection handshake. + */ +export interface VersionSDKType { + identifier: string; + features: string[]; +} +/** Params defines the set of Connection parameters. */ +export interface Params { + /** + * maximum expected time per block (in nanoseconds), used to enforce block delay. This parameter should reflect the + * largest amount of time that the chain might reasonably take to produce the next block under normal operating + * conditions. A safe choice is 3-5x the expected time per block. + */ + maxExpectedTimePerBlock: Long; +} +/** Params defines the set of Connection parameters. 
*/ +export interface ParamsSDKType { + max_expected_time_per_block: Long; +} +export declare const ConnectionEnd: { + encode(message: ConnectionEnd, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionEnd; + fromPartial(object: DeepPartial): ConnectionEnd; +}; +export declare const IdentifiedConnection: { + encode(message: IdentifiedConnection, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedConnection; + fromPartial(object: DeepPartial): IdentifiedConnection; +}; +export declare const Counterparty: { + encode(message: Counterparty, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Counterparty; + fromPartial(object: DeepPartial): Counterparty; +}; +export declare const ClientPaths: { + encode(message: ClientPaths, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientPaths; + fromPartial(object: DeepPartial): ClientPaths; +}; +export declare const ConnectionPaths: { + encode(message: ConnectionPaths, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionPaths; + fromPartial(object: DeepPartial): ConnectionPaths; +}; +export declare const Version: { + encode(message: Version, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Version; + fromPartial(object: DeepPartial): Version; +}; +export declare const Params: { + encode(message: Params, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Params; + fromPartial(object: DeepPartial): Params; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/genesis.d.ts b/packages/codegen/dist/ibc/core/connection/v1/genesis.d.ts new file mode 100644 index 00000000..91470832 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/genesis.d.ts @@ -0,0 +1,24 @@ +/// +import { IdentifiedConnection, IdentifiedConnectionSDKType, ConnectionPaths, ConnectionPathsSDKType, Params, ParamsSDKType } from "./connection"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc connection submodule's genesis state. */ +export interface GenesisState { + connections: IdentifiedConnection[]; + clientConnectionPaths: ConnectionPaths[]; + /** the sequence for the next generated connection identifier */ + nextConnectionSequence: Long; + params?: Params; +} +/** GenesisState defines the ibc connection submodule's genesis state. 
*/ +export interface GenesisStateSDKType { + connections: IdentifiedConnectionSDKType[]; + client_connection_paths: ConnectionPathsSDKType[]; + next_connection_sequence: Long; + params?: ParamsSDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/query.d.ts b/packages/codegen/dist/ibc/core/connection/v1/query.d.ts new file mode 100644 index 00000000..410061d2 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/query.d.ts @@ -0,0 +1,245 @@ +/// +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { ConnectionEnd, ConnectionEndSDKType, IdentifiedConnection, IdentifiedConnectionSDKType } from "./connection"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType } from "../../client/v1/client"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * QueryConnectionRequest is the request type for the Query/Connection RPC + * method + */ +export interface QueryConnectionRequest { + /** connection unique identifier */ + connectionId: string; +} +/** + * QueryConnectionRequest is the request type for the Query/Connection RPC + * method + */ +export interface QueryConnectionRequestSDKType { + connection_id: string; +} +/** + * QueryConnectionResponse is the response type for the Query/Connection RPC + * method. Besides the connection end, it includes a proof and the height from + * which the proof was retrieved. + */ +export interface QueryConnectionResponse { + /** connection associated with the request identifier */ + connection?: ConnectionEnd; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryConnectionResponse is the response type for the Query/Connection RPC + * method. Besides the connection end, it includes a proof and the height from + * which the proof was retrieved. + */ +export interface QueryConnectionResponseSDKType { + connection?: ConnectionEndSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionsRequest is the request type for the Query/Connections RPC + * method + */ +export interface QueryConnectionsRequest { + pagination?: PageRequest; +} +/** + * QueryConnectionsRequest is the request type for the Query/Connections RPC + * method + */ +export interface QueryConnectionsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionsResponse is the response type for the Query/Connections RPC + * method. + */ +export interface QueryConnectionsResponse { + /** list of stored connections of the chain. */ + connections: IdentifiedConnection[]; + /** pagination response */ + pagination?: PageResponse; + /** query block height */ + height?: Height; +} +/** + * QueryConnectionsResponse is the response type for the Query/Connections RPC + * method. 
+ */ +export interface QueryConnectionsResponseSDKType { + connections: IdentifiedConnectionSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryClientConnectionsRequest is the request type for the + * Query/ClientConnections RPC method + */ +export interface QueryClientConnectionsRequest { + /** client identifier associated with a connection */ + clientId: string; +} +/** + * QueryClientConnectionsRequest is the request type for the + * Query/ClientConnections RPC method + */ +export interface QueryClientConnectionsRequestSDKType { + client_id: string; +} +/** + * QueryClientConnectionsResponse is the response type for the + * Query/ClientConnections RPC method + */ +export interface QueryClientConnectionsResponse { + /** slice of all the connection paths associated with a client. */ + connectionPaths: string[]; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was generated */ + proofHeight?: Height; +} +/** + * QueryClientConnectionsResponse is the response type for the + * Query/ClientConnections RPC method + */ +export interface QueryClientConnectionsResponseSDKType { + connection_paths: string[]; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionClientStateRequest is the request type for the + * Query/ConnectionClientState RPC method + */ +export interface QueryConnectionClientStateRequest { + /** connection identifier */ + connectionId: string; +} +/** + * QueryConnectionClientStateRequest is the request type for the + * Query/ConnectionClientState RPC method + */ +export interface QueryConnectionClientStateRequestSDKType { + connection_id: string; +} +/** + * QueryConnectionClientStateResponse is the response type for the + * Query/ConnectionClientState RPC method + */ +export interface QueryConnectionClientStateResponse { + /** client state associated with the channel */ + identifiedClientState?: IdentifiedClientState; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryConnectionClientStateResponse is the response type for the + * Query/ConnectionClientState RPC method + */ +export interface QueryConnectionClientStateResponseSDKType { + identified_client_state?: IdentifiedClientStateSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionConsensusStateRequest is the request type for the + * Query/ConnectionConsensusState RPC method + */ +export interface QueryConnectionConsensusStateRequest { + /** connection identifier */ + connectionId: string; + revisionNumber: Long; + revisionHeight: Long; +} +/** + * QueryConnectionConsensusStateRequest is the request type for the + * Query/ConnectionConsensusState RPC method + */ +export interface QueryConnectionConsensusStateRequestSDKType { + connection_id: string; + revision_number: Long; + revision_height: Long; +} +/** + * QueryConnectionConsensusStateResponse is the response type for the + * Query/ConnectionConsensusState RPC method + */ +export interface QueryConnectionConsensusStateResponse { + /** consensus state associated with the channel */ + consensusState?: Any; + /** client ID associated with the consensus state */ + clientId: string; + /** merkle proof of existence */ + proof: Uint8Array; + /** height at which the proof was retrieved */ + proofHeight?: Height; +} +/** + * QueryConnectionConsensusStateResponse is the response type for the + * Query/ConnectionConsensusState RPC method + */ +export 
interface QueryConnectionConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + client_id: string; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +export declare const QueryConnectionRequest: { + encode(message: QueryConnectionRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionRequest; + fromPartial(object: DeepPartial): QueryConnectionRequest; +}; +export declare const QueryConnectionResponse: { + encode(message: QueryConnectionResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionResponse; + fromPartial(object: DeepPartial): QueryConnectionResponse; +}; +export declare const QueryConnectionsRequest: { + encode(message: QueryConnectionsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionsRequest; + fromPartial(object: DeepPartial): QueryConnectionsRequest; +}; +export declare const QueryConnectionsResponse: { + encode(message: QueryConnectionsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionsResponse; + fromPartial(object: DeepPartial): QueryConnectionsResponse; +}; +export declare const QueryClientConnectionsRequest: { + encode(message: QueryClientConnectionsRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientConnectionsRequest; + fromPartial(object: DeepPartial): QueryClientConnectionsRequest; +}; +export declare const QueryClientConnectionsResponse: { + encode(message: QueryClientConnectionsResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientConnectionsResponse; + fromPartial(object: DeepPartial): QueryClientConnectionsResponse; +}; +export declare const QueryConnectionClientStateRequest: { + encode(message: QueryConnectionClientStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionClientStateRequest; + fromPartial(object: DeepPartial): QueryConnectionClientStateRequest; +}; +export declare const QueryConnectionClientStateResponse: { + encode(message: QueryConnectionClientStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionClientStateResponse; + fromPartial(object: DeepPartial): QueryConnectionClientStateResponse; +}; +export declare const QueryConnectionConsensusStateRequest: { + encode(message: QueryConnectionConsensusStateRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionConsensusStateRequest; + fromPartial(object: DeepPartial): QueryConnectionConsensusStateRequest; +}; +export declare const QueryConnectionConsensusStateResponse: { + encode(message: QueryConnectionConsensusStateResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionConsensusStateResponse; + fromPartial(object: DeepPartial): QueryConnectionConsensusStateResponse; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/query.lcd.d.ts b/packages/codegen/dist/ibc/core/connection/v1/query.lcd.d.ts new file mode 100644 index 00000000..fec50273 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/query.lcd.d.ts @@ -0,0 +1,13 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryConnectionRequest, QueryConnectionResponseSDKType, 
QueryConnectionsRequest, QueryConnectionsResponseSDKType, QueryClientConnectionsRequest, QueryClientConnectionsResponseSDKType, QueryConnectionClientStateRequest, QueryConnectionClientStateResponseSDKType, QueryConnectionConsensusStateRequest, QueryConnectionConsensusStateResponseSDKType } from "./query"; +export declare class LCDQueryClient { + req: LCDClient; + constructor({ requestClient }: { + requestClient: LCDClient; + }); + connection(params: QueryConnectionRequest): Promise<QueryConnectionResponseSDKType>; + connections(params?: QueryConnectionsRequest): Promise<QueryConnectionsResponseSDKType>; + clientConnections(params: QueryClientConnectionsRequest): Promise<QueryClientConnectionsResponseSDKType>; + connectionClientState(params: QueryConnectionClientStateRequest): Promise<QueryConnectionClientStateResponseSDKType>; + connectionConsensusState(params: QueryConnectionConsensusStateRequest): Promise<QueryConnectionConsensusStateResponseSDKType>; +} diff --git a/packages/codegen/dist/ibc/core/connection/v1/query.rpc.Query.d.ts b/packages/codegen/dist/ibc/core/connection/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..7eb562b1 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/query.rpc.Query.d.ts @@ -0,0 +1,41 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryConnectionRequest, QueryConnectionResponse, QueryConnectionsRequest, QueryConnectionsResponse, QueryClientConnectionsRequest, QueryClientConnectionsResponse, QueryConnectionClientStateRequest, QueryConnectionClientStateResponse, QueryConnectionConsensusStateRequest, QueryConnectionConsensusStateResponse } from "./query"; +/** Query provides defines the gRPC querier service */ +export interface Query { + /** Connection queries an IBC connection end. */ + connection(request: QueryConnectionRequest): Promise<QueryConnectionResponse>; + /** Connections queries all the IBC connections of a chain. */ + connections(request?: QueryConnectionsRequest): Promise<QueryConnectionsResponse>; + /** + * ClientConnections queries the connection paths associated with a client + * state. + */ + clientConnections(request: QueryClientConnectionsRequest): Promise<QueryClientConnectionsResponse>; + /** + * ConnectionClientState queries the client state associated with the + * connection. + */ + connectionClientState(request: QueryConnectionClientStateRequest): Promise<QueryConnectionClientStateResponse>; + /** + * ConnectionConsensusState queries the consensus state associated with the + * connection.
+ */ + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise<QueryConnectionConsensusStateResponse>; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + connection(request: QueryConnectionRequest): Promise<QueryConnectionResponse>; + connections(request?: QueryConnectionsRequest): Promise<QueryConnectionsResponse>; + clientConnections(request: QueryClientConnectionsRequest): Promise<QueryClientConnectionsResponse>; + connectionClientState(request: QueryConnectionClientStateRequest): Promise<QueryConnectionClientStateResponse>; + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise<QueryConnectionConsensusStateResponse>; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + connection(request: QueryConnectionRequest): Promise<QueryConnectionResponse>; + connections(request?: QueryConnectionsRequest): Promise<QueryConnectionsResponse>; + clientConnections(request: QueryClientConnectionsRequest): Promise<QueryClientConnectionsResponse>; + connectionClientState(request: QueryConnectionClientStateRequest): Promise<QueryConnectionClientStateResponse>; + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise<QueryConnectionConsensusStateResponse>; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/tx.d.ts b/packages/codegen/dist/ibc/core/connection/v1/tx.d.ts new file mode 100644 index 00000000..66fa3105 --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/tx.d.ts @@ -0,0 +1,209 @@ +/// +import { Counterparty, CounterpartySDKType, Version, VersionSDKType } from "./connection"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Height, HeightSDKType } from "../../client/v1/client"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgConnectionOpenInit defines the msg sent by an account on Chain A to + * initialize a connection with Chain B. + */ +export interface MsgConnectionOpenInit { + clientId: string; + counterparty?: Counterparty; + version?: Version; + delayPeriod: Long; + signer: string; +} +/** + * MsgConnectionOpenInit defines the msg sent by an account on Chain A to + * initialize a connection with Chain B. + */ +export interface MsgConnectionOpenInitSDKType { + client_id: string; + counterparty?: CounterpartySDKType; + version?: VersionSDKType; + delay_period: Long; + signer: string; +} +/** + * MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response + * type. + */ +export interface MsgConnectionOpenInitResponse { +} +/** + * MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response + * type. + */ +export interface MsgConnectionOpenInitResponseSDKType { +} +/** + * MsgConnectionOpenTry defines a msg sent by a Relayer to try to open a + * connection on Chain B. + */ +export interface MsgConnectionOpenTry { + clientId: string; + /** + * in the case of crossing hello's, when both chains call OpenInit, we need + * the connection identifier of the previous connection in state INIT + */ + previousConnectionId: string; + clientState?: Any; + counterparty?: Counterparty; + delayPeriod: Long; + counterpartyVersions: Version[]; + proofHeight?: Height; + /** + * proof of the initialization the connection on Chain A: `UNITIALIZED -> + * INIT` + */ + proofInit: Uint8Array; + /** proof of client state included in message */ + proofClient: Uint8Array; + /** proof of client consensus state */ + proofConsensus: Uint8Array; + consensusHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenTry defines a msg sent by a Relayer to try to open a + * connection on Chain B.
+ */ +export interface MsgConnectionOpenTrySDKType { + client_id: string; + previous_connection_id: string; + client_state?: AnySDKType; + counterparty?: CounterpartySDKType; + delay_period: Long; + counterparty_versions: VersionSDKType[]; + proof_height?: HeightSDKType; + proof_init: Uint8Array; + proof_client: Uint8Array; + proof_consensus: Uint8Array; + consensus_height?: HeightSDKType; + signer: string; +} +/** MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type. */ +export interface MsgConnectionOpenTryResponse { +} +/** MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type. */ +export interface MsgConnectionOpenTryResponseSDKType { +} +/** + * MsgConnectionOpenAck defines a msg sent by a Relayer to Chain A to + * acknowledge the change of connection state to TRYOPEN on Chain B. + */ +export interface MsgConnectionOpenAck { + connectionId: string; + counterpartyConnectionId: string; + version?: Version; + clientState?: Any; + proofHeight?: Height; + /** + * proof of the initialization the connection on Chain B: `UNITIALIZED -> + * TRYOPEN` + */ + proofTry: Uint8Array; + /** proof of client state included in message */ + proofClient: Uint8Array; + /** proof of client consensus state */ + proofConsensus: Uint8Array; + consensusHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenAck defines a msg sent by a Relayer to Chain A to + * acknowledge the change of connection state to TRYOPEN on Chain B. + */ +export interface MsgConnectionOpenAckSDKType { + connection_id: string; + counterparty_connection_id: string; + version?: VersionSDKType; + client_state?: AnySDKType; + proof_height?: HeightSDKType; + proof_try: Uint8Array; + proof_client: Uint8Array; + proof_consensus: Uint8Array; + consensus_height?: HeightSDKType; + signer: string; +} +/** MsgConnectionOpenAckResponse defines the Msg/ConnectionOpenAck response type. */ +export interface MsgConnectionOpenAckResponse { +} +/** MsgConnectionOpenAckResponse defines the Msg/ConnectionOpenAck response type. */ +export interface MsgConnectionOpenAckResponseSDKType { +} +/** + * MsgConnectionOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of connection state to OPEN on Chain A. + */ +export interface MsgConnectionOpenConfirm { + connectionId: string; + /** proof for the change of the connection state on Chain A: `INIT -> OPEN` */ + proofAck: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of connection state to OPEN on Chain A. + */ +export interface MsgConnectionOpenConfirmSDKType { + connection_id: string; + proof_ack: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgConnectionOpenConfirmResponse defines the Msg/ConnectionOpenConfirm + * response type. + */ +export interface MsgConnectionOpenConfirmResponse { +} +/** + * MsgConnectionOpenConfirmResponse defines the Msg/ConnectionOpenConfirm + * response type. 
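The message codecs declared further below expose the usual encode/decode/fromPartial trio on top of protobufjs/minimal. Here is a hedged round-trip sketch for MsgConnectionOpenInit; it is not part of the generated output, every field value is a placeholder, the "./helpers" import path assumes this dist layout, and the Counterparty/Version field names are assumed from ibc.core.connection.v1.

// Sketch: building and round-tripping MsgConnectionOpenInit with the generated codec.
// All identifiers, the store prefix, and the signer address are placeholders.
import { Long } from "./helpers";
import { MsgConnectionOpenInit } from "./ibc/core/connection/v1/tx";

const msg = MsgConnectionOpenInit.fromPartial({
  clientId: "07-tendermint-0",
  counterparty: {
    clientId: "07-tendermint-1",
    connectionId: "",
    prefix: { keyPrefix: new Uint8Array([0x69, 0x62, 0x63]) }, // bytes for "ibc"
  },
  version: { identifier: "1", features: ["ORDER_ORDERED", "ORDER_UNORDERED"] },
  delayPeriod: Long.UZERO,
  signer: "cosmos1...", // placeholder address
});

const bytes = MsgConnectionOpenInit.encode(msg).finish();
const decoded = MsgConnectionOpenInit.decode(bytes);
console.log(decoded.clientId, decoded.delayPeriod.toString());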
+ */ +export interface MsgConnectionOpenConfirmResponseSDKType { +} +export declare const MsgConnectionOpenInit: { + encode(message: MsgConnectionOpenInit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenInit; + fromPartial(object: DeepPartial): MsgConnectionOpenInit; +}; +export declare const MsgConnectionOpenInitResponse: { + encode(_: MsgConnectionOpenInitResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenInitResponse; + fromPartial(_: DeepPartial): MsgConnectionOpenInitResponse; +}; +export declare const MsgConnectionOpenTry: { + encode(message: MsgConnectionOpenTry, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenTry; + fromPartial(object: DeepPartial): MsgConnectionOpenTry; +}; +export declare const MsgConnectionOpenTryResponse: { + encode(_: MsgConnectionOpenTryResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenTryResponse; + fromPartial(_: DeepPartial): MsgConnectionOpenTryResponse; +}; +export declare const MsgConnectionOpenAck: { + encode(message: MsgConnectionOpenAck, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenAck; + fromPartial(object: DeepPartial): MsgConnectionOpenAck; +}; +export declare const MsgConnectionOpenAckResponse: { + encode(_: MsgConnectionOpenAckResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenAckResponse; + fromPartial(_: DeepPartial): MsgConnectionOpenAckResponse; +}; +export declare const MsgConnectionOpenConfirm: { + encode(message: MsgConnectionOpenConfirm, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenConfirm; + fromPartial(object: DeepPartial): MsgConnectionOpenConfirm; +}; +export declare const MsgConnectionOpenConfirmResponse: { + encode(_: MsgConnectionOpenConfirmResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenConfirmResponse; + fromPartial(_: DeepPartial): MsgConnectionOpenConfirmResponse; +}; diff --git a/packages/codegen/dist/ibc/core/connection/v1/tx.rpc.msg.d.ts b/packages/codegen/dist/ibc/core/connection/v1/tx.rpc.msg.d.ts new file mode 100644 index 00000000..06fc8d7d --- /dev/null +++ b/packages/codegen/dist/ibc/core/connection/v1/tx.rpc.msg.d.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../../helpers"; +import { MsgConnectionOpenInit, MsgConnectionOpenInitResponse, MsgConnectionOpenTry, MsgConnectionOpenTryResponse, MsgConnectionOpenAck, MsgConnectionOpenAckResponse, MsgConnectionOpenConfirm, MsgConnectionOpenConfirmResponse } from "./tx"; +/** Msg defines the ibc/connection Msg service. */ +export interface Msg { + /** ConnectionOpenInit defines a rpc handler method for MsgConnectionOpenInit. */ + connectionOpenInit(request: MsgConnectionOpenInit): Promise; + /** ConnectionOpenTry defines a rpc handler method for MsgConnectionOpenTry. */ + connectionOpenTry(request: MsgConnectionOpenTry): Promise; + /** ConnectionOpenAck defines a rpc handler method for MsgConnectionOpenAck. */ + connectionOpenAck(request: MsgConnectionOpenAck): Promise; + /** + * ConnectionOpenConfirm defines a rpc handler method for + * MsgConnectionOpenConfirm. 
+ */ + connectionOpenConfirm(request: MsgConnectionOpenConfirm): Promise; +} +export declare class MsgClientImpl implements Msg { + private readonly rpc; + constructor(rpc: Rpc); + connectionOpenInit(request: MsgConnectionOpenInit): Promise; + connectionOpenTry(request: MsgConnectionOpenTry): Promise; + connectionOpenAck(request: MsgConnectionOpenAck): Promise; + connectionOpenConfirm(request: MsgConnectionOpenConfirm): Promise; +} diff --git a/packages/codegen/dist/ibc/core/port/v1/query.d.ts b/packages/codegen/dist/ibc/core/port/v1/query.d.ts new file mode 100644 index 00000000..5369d615 --- /dev/null +++ b/packages/codegen/dist/ibc/core/port/v1/query.d.ts @@ -0,0 +1,46 @@ +import { Order, Counterparty, CounterpartySDKType } from "../../channel/v1/channel"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** QueryAppVersionRequest is the request type for the Query/AppVersion RPC method */ +export interface QueryAppVersionRequest { + /** port unique identifier */ + portId: string; + /** connection unique identifier */ + connectionId: string; + /** whether the channel is ordered or unordered */ + ordering: Order; + /** counterparty channel end */ + counterparty?: Counterparty; + /** proposed version */ + proposedVersion: string; +} +/** QueryAppVersionRequest is the request type for the Query/AppVersion RPC method */ +export interface QueryAppVersionRequestSDKType { + port_id: string; + connection_id: string; + ordering: Order; + counterparty?: CounterpartySDKType; + proposed_version: string; +} +/** QueryAppVersionResponse is the response type for the Query/AppVersion RPC method. */ +export interface QueryAppVersionResponse { + /** port id associated with the request identifiers */ + portId: string; + /** supported app version */ + version: string; +} +/** QueryAppVersionResponse is the response type for the Query/AppVersion RPC method. 
*/ +export interface QueryAppVersionResponseSDKType { + port_id: string; + version: string; +} +export declare const QueryAppVersionRequest: { + encode(message: QueryAppVersionRequest, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppVersionRequest; + fromPartial(object: DeepPartial): QueryAppVersionRequest; +}; +export declare const QueryAppVersionResponse: { + encode(message: QueryAppVersionResponse, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppVersionResponse; + fromPartial(object: DeepPartial): QueryAppVersionResponse; +}; diff --git a/packages/codegen/dist/ibc/core/port/v1/query.rpc.Query.d.ts b/packages/codegen/dist/ibc/core/port/v1/query.rpc.Query.d.ts new file mode 100644 index 00000000..7e0f991c --- /dev/null +++ b/packages/codegen/dist/ibc/core/port/v1/query.rpc.Query.d.ts @@ -0,0 +1,16 @@ +import { Rpc } from "../../../../helpers"; +import { QueryClient } from "@cosmjs/stargate"; +import { QueryAppVersionRequest, QueryAppVersionResponse } from "./query"; +/** Query defines the gRPC querier service */ +export interface Query { + /** AppVersion queries an IBC Port and determines the appropriate application version to be used */ + appVersion(request: QueryAppVersionRequest): Promise; +} +export declare class QueryClientImpl implements Query { + private readonly rpc; + constructor(rpc: Rpc); + appVersion(request: QueryAppVersionRequest): Promise; +} +export declare const createRpcQueryExtension: (base: QueryClient) => { + appVersion(request: QueryAppVersionRequest): Promise; +}; diff --git a/packages/codegen/dist/ibc/core/types/v1/genesis.d.ts b/packages/codegen/dist/ibc/core/types/v1/genesis.d.ts new file mode 100644 index 00000000..c7a59949 --- /dev/null +++ b/packages/codegen/dist/ibc/core/types/v1/genesis.d.ts @@ -0,0 +1,28 @@ +import { GenesisState as GenesisState1 } from "../../client/v1/genesis"; +import { GenesisStateSDKType as GenesisState1SDKType } from "../../client/v1/genesis"; +import { GenesisState as GenesisState2 } from "../../connection/v1/genesis"; +import { GenesisStateSDKType as GenesisState2SDKType } from "../../connection/v1/genesis"; +import { GenesisState as GenesisState3 } from "../../channel/v1/genesis"; +import { GenesisStateSDKType as GenesisState3SDKType } from "../../channel/v1/genesis"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** GenesisState defines the ibc module's genesis state. */ +export interface GenesisState { + /** ICS002 - Clients genesis state */ + clientGenesis?: GenesisState1; + /** ICS003 - Connections genesis state */ + connectionGenesis?: GenesisState2; + /** ICS004 - Channel genesis state */ + channelGenesis?: GenesisState3; +} +/** GenesisState defines the ibc module's genesis state. 
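The GenesisState codec declared just below bundles the client, connection, and channel genesis states into one message. A small sketch of decoding it, not part of the generated output; the raw bytes are supplied by the caller.

// Sketch: splitting a serialized ibc genesis blob into its submodule parts.
// `rawIbcGenesis` is a placeholder Uint8Array provided by the caller.
import { GenesisState } from "./ibc/core/types/v1/genesis";

function splitIbcGenesis(rawIbcGenesis: Uint8Array) {
  const genesis = GenesisState.decode(rawIbcGenesis);
  return {
    clients: genesis.clientGenesis,          // ICS002
    connections: genesis.connectionGenesis,  // ICS003
    channels: genesis.channelGenesis,        // ICS004
  };
}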
*/ +export interface GenesisStateSDKType { + client_genesis?: GenesisState1SDKType; + connection_genesis?: GenesisState2SDKType; + channel_genesis?: GenesisState3SDKType; +} +export declare const GenesisState: { + encode(message: GenesisState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState; + fromPartial(object: DeepPartial): GenesisState; +}; diff --git a/packages/codegen/dist/ibc/lcd.d.ts b/packages/codegen/dist/ibc/lcd.d.ts new file mode 100644 index 00000000..77c522d8 --- /dev/null +++ b/packages/codegen/dist/ibc/lcd.d.ts @@ -0,0 +1,75 @@ +export declare const createLCDClient: ({ restEndpoint }: { + restEndpoint: string; +}) => Promise<{ + cosmos: { + auth: { + v1beta1: import("../cosmos/auth/v1beta1/query.lcd").LCDQueryClient; + }; + authz: { + v1beta1: import("../cosmos/authz/v1beta1/query.lcd").LCDQueryClient; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/query.lcd").LCDQueryClient; + }; + base: { + tendermint: { + v1beta1: import("../cosmos/base/tendermint/v1beta1/query.lcd").LCDQueryClient; + }; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/query.lcd").LCDQueryClient; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/query.lcd").LCDQueryClient; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/query.lcd").LCDQueryClient; + }; + gov: { + v1: import("../cosmos/gov/v1/query.lcd").LCDQueryClient; + v1beta1: import("../cosmos/gov/v1beta1/query.lcd").LCDQueryClient; + }; + group: { + v1: import("../cosmos/group/v1/query.lcd").LCDQueryClient; + }; + mint: { + v1beta1: import("../cosmos/mint/v1beta1/query.lcd").LCDQueryClient; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/query.lcd").LCDQueryClient; + }; + params: { + v1beta1: import("../cosmos/params/v1beta1/query.lcd").LCDQueryClient; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/query.lcd").LCDQueryClient; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/query.lcd").LCDQueryClient; + }; + tx: { + v1beta1: import("../cosmos/tx/v1beta1/service.lcd").LCDQueryClient; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/query.lcd").LCDQueryClient; + }; + }; + ibc: { + applications: { + transfer: { + v1: import("./applications/transfer/v1/query.lcd").LCDQueryClient; + }; + }; + core: { + channel: { + v1: import("./core/channel/v1/query.lcd").LCDQueryClient; + }; + client: { + v1: import("./core/client/v1/query.lcd").LCDQueryClient; + }; + connection: { + v1: import("./core/connection/v1/query.lcd").LCDQueryClient; + }; + }; + }; +}>; diff --git a/packages/codegen/dist/ibc/lightclients/localhost/v1/localhost.d.ts b/packages/codegen/dist/ibc/lightclients/localhost/v1/localhost.d.ts new file mode 100644 index 00000000..7e8df1a0 --- /dev/null +++ b/packages/codegen/dist/ibc/lightclients/localhost/v1/localhost.d.ts @@ -0,0 +1,26 @@ +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * ClientState defines a loopback (localhost) client. It requires (read-only) + * access to keys outside the client prefix. + */ +export interface ClientState { + /** self chain ID */ + chainId: string; + /** self latest block height */ + height?: Height; +} +/** + * ClientState defines a loopback (localhost) client. It requires (read-only) + * access to keys outside the client prefix. 
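The createLCDClient factory above lazily assembles the per-module LCD query clients behind a namespace tree that mirrors the proto packages. A usage sketch follows, limited to connection methods whose signatures appear earlier in this diff; it is not part of the generated output, and the REST endpoint and client id are placeholders.

// Sketch: querying over REST with the generated LCD client factory.
// Endpoint and client id are placeholders.
import { createLCDClient } from "./ibc/lcd";

async function listConnections(restEndpoint: string) {
  const client = await createLCDClient({ restEndpoint });
  // Namespaces mirror the proto package paths, e.g. ibc.core.connection.v1.
  const all = await client.ibc.core.connection.v1.connections();
  const forClient = await client.ibc.core.connection.v1.clientConnections({
    clientId: "07-tendermint-0",
  });
  return { all, forClient };
}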
+ */ +export interface ClientStateSDKType { + chain_id: string; + height?: HeightSDKType; +} +export declare const ClientState: { + encode(message: ClientState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState; + fromPartial(object: DeepPartial): ClientState; +}; diff --git a/packages/codegen/dist/ibc/lightclients/solomachine/v1/solomachine.d.ts b/packages/codegen/dist/ibc/lightclients/solomachine/v1/solomachine.d.ts new file mode 100644 index 00000000..1a23fd75 --- /dev/null +++ b/packages/codegen/dist/ibc/lightclients/solomachine/v1/solomachine.d.ts @@ -0,0 +1,391 @@ +/// +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { ConnectionEnd, ConnectionEndSDKType } from "../../../core/connection/v1/connection"; +import { Channel, ChannelSDKType } from "../../../core/channel/v1/channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * DataType defines the type of solo machine proof being created. This is done + * to preserve uniqueness of different data sign byte encodings. + */ +export declare enum DataType { + /** DATA_TYPE_UNINITIALIZED_UNSPECIFIED - Default State */ + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0, + /** DATA_TYPE_CLIENT_STATE - Data type for client state verification */ + DATA_TYPE_CLIENT_STATE = 1, + /** DATA_TYPE_CONSENSUS_STATE - Data type for consensus state verification */ + DATA_TYPE_CONSENSUS_STATE = 2, + /** DATA_TYPE_CONNECTION_STATE - Data type for connection state verification */ + DATA_TYPE_CONNECTION_STATE = 3, + /** DATA_TYPE_CHANNEL_STATE - Data type for channel state verification */ + DATA_TYPE_CHANNEL_STATE = 4, + /** DATA_TYPE_PACKET_COMMITMENT - Data type for packet commitment verification */ + DATA_TYPE_PACKET_COMMITMENT = 5, + /** DATA_TYPE_PACKET_ACKNOWLEDGEMENT - Data type for packet acknowledgement verification */ + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6, + /** DATA_TYPE_PACKET_RECEIPT_ABSENCE - Data type for packet receipt absence verification */ + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7, + /** DATA_TYPE_NEXT_SEQUENCE_RECV - Data type for next sequence recv verification */ + DATA_TYPE_NEXT_SEQUENCE_RECV = 8, + /** DATA_TYPE_HEADER - Data type for header verification */ + DATA_TYPE_HEADER = 9, + UNRECOGNIZED = -1 +} +export declare const DataTypeSDKType: typeof DataType; +export declare function dataTypeFromJSON(object: any): DataType; +export declare function dataTypeToJSON(object: DataType): string; +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ +export interface ClientState { + /** latest sequence of the client state */ + sequence: Long; + /** frozen sequence of the solo machine */ + frozenSequence: Long; + consensusState?: ConsensusState; + /** + * when set to true, will allow governance to update a solo machine client. + * The client will be unfrozen if it is frozen. + */ + allowUpdateAfterProposal: boolean; +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ +export interface ClientStateSDKType { + sequence: Long; + frozen_sequence: Long; + consensus_state?: ConsensusStateSDKType; + allow_update_after_proposal: boolean; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. 
+ */ +export interface ConsensusState { + /** public key of the solo machine */ + publicKey?: Any; + /** + * diversifier allows the same public key to be re-used across different solo + * machine clients (potentially on different chains) without being considered + * misbehaviour. + */ + diversifier: string; + timestamp: Long; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ +export interface ConsensusStateSDKType { + public_key?: AnySDKType; + diversifier: string; + timestamp: Long; +} +/** Header defines a solo machine consensus header */ +export interface Header { + /** sequence to update solo machine public key at */ + sequence: Long; + timestamp: Long; + signature: Uint8Array; + newPublicKey?: Any; + newDiversifier: string; +} +/** Header defines a solo machine consensus header */ +export interface HeaderSDKType { + sequence: Long; + timestamp: Long; + signature: Uint8Array; + new_public_key?: AnySDKType; + new_diversifier: string; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ +export interface Misbehaviour { + clientId: string; + sequence: Long; + signatureOne?: SignatureAndData; + signatureTwo?: SignatureAndData; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ +export interface MisbehaviourSDKType { + client_id: string; + sequence: Long; + signature_one?: SignatureAndDataSDKType; + signature_two?: SignatureAndDataSDKType; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ +export interface SignatureAndData { + signature: Uint8Array; + dataType: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ +export interface SignatureAndDataSDKType { + signature: Uint8Array; + data_type: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ +export interface TimestampedSignatureData { + signatureData: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ +export interface TimestampedSignatureDataSDKType { + signature_data: Uint8Array; + timestamp: Long; +} +/** SignBytes defines the signed bytes used for signature verification. */ +export interface SignBytes { + sequence: Long; + timestamp: Long; + diversifier: string; + /** type of the data used */ + dataType: DataType; + /** marshaled data */ + data: Uint8Array; +} +/** SignBytes defines the signed bytes used for signature verification. */ +export interface SignBytesSDKType { + sequence: Long; + timestamp: Long; + diversifier: string; + data_type: DataType; + data: Uint8Array; +} +/** HeaderData returns the SignBytes data for update verification. */ +export interface HeaderData { + /** header public key */ + newPubKey?: Any; + /** header diversifier */ + newDiversifier: string; +} +/** HeaderData returns the SignBytes data for update verification. */ +export interface HeaderDataSDKType { + new_pub_key?: AnySDKType; + new_diversifier: string; +} +/** ClientStateData returns the SignBytes data for client state verification. 
*/ +export interface ClientStateData { + path: Uint8Array; + clientState?: Any; +} +/** ClientStateData returns the SignBytes data for client state verification. */ +export interface ClientStateDataSDKType { + path: Uint8Array; + client_state?: AnySDKType; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ +export interface ConsensusStateData { + path: Uint8Array; + consensusState?: Any; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ +export interface ConsensusStateDataSDKType { + path: Uint8Array; + consensus_state?: AnySDKType; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ +export interface ConnectionStateData { + path: Uint8Array; + connection?: ConnectionEnd; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ +export interface ConnectionStateDataSDKType { + path: Uint8Array; + connection?: ConnectionEndSDKType; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ +export interface ChannelStateData { + path: Uint8Array; + channel?: Channel; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ +export interface ChannelStateDataSDKType { + path: Uint8Array; + channel?: ChannelSDKType; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ +export interface PacketCommitmentData { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ +export interface PacketCommitmentDataSDKType { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ +export interface PacketAcknowledgementData { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ +export interface PacketAcknowledgementDataSDKType { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ +export interface PacketReceiptAbsenceData { + path: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ +export interface PacketReceiptAbsenceDataSDKType { + path: Uint8Array; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ +export interface NextSequenceRecvData { + path: Uint8Array; + nextSeqRecv: Long; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. 
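The solo machine types above are plain codecs plus the DataType JSON helpers. A hedged sketch of building SignBytes, not part of the generated output; sequence, timestamp, diversifier, and the "./helpers" import path are placeholders or assumptions.

// Sketch: building solo machine SignBytes and using the DataType JSON helper.
// All values are placeholders; the signed-over payload is left empty.
import { Long } from "./helpers";
import { DataType, dataTypeToJSON, SignBytes } from "./ibc/lightclients/solomachine/v1/solomachine";

const signBytes = SignBytes.fromPartial({
  sequence: Long.fromNumber(1, true),
  timestamp: Long.fromNumber(1650000000, true),
  diversifier: "my-diversifier",
  dataType: DataType.DATA_TYPE_CLIENT_STATE,
  data: new Uint8Array(), // marshaled data being signed over would go here
});

console.log(dataTypeToJSON(signBytes.dataType));
const encoded = SignBytes.encode(signBytes).finish();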
+ */ +export interface NextSequenceRecvDataSDKType { + path: Uint8Array; + next_seq_recv: Long; +} +export declare const ClientState: { + encode(message: ClientState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState; + fromPartial(object: DeepPartial): ClientState; +}; +export declare const ConsensusState: { + encode(message: ConsensusState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState; + fromPartial(object: DeepPartial): ConsensusState; +}; +export declare const Header: { + encode(message: Header, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Header; + fromPartial(object: DeepPartial
): Header; +}; +export declare const Misbehaviour: { + encode(message: Misbehaviour, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour; + fromPartial(object: DeepPartial): Misbehaviour; +}; +export declare const SignatureAndData: { + encode(message: SignatureAndData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureAndData; + fromPartial(object: DeepPartial): SignatureAndData; +}; +export declare const TimestampedSignatureData: { + encode(message: TimestampedSignatureData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TimestampedSignatureData; + fromPartial(object: DeepPartial): TimestampedSignatureData; +}; +export declare const SignBytes: { + encode(message: SignBytes, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignBytes; + fromPartial(object: DeepPartial): SignBytes; +}; +export declare const HeaderData: { + encode(message: HeaderData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): HeaderData; + fromPartial(object: DeepPartial): HeaderData; +}; +export declare const ClientStateData: { + encode(message: ClientStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientStateData; + fromPartial(object: DeepPartial): ClientStateData; +}; +export declare const ConsensusStateData: { + encode(message: ConsensusStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateData; + fromPartial(object: DeepPartial): ConsensusStateData; +}; +export declare const ConnectionStateData: { + encode(message: ConnectionStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionStateData; + fromPartial(object: DeepPartial): ConnectionStateData; +}; +export declare const ChannelStateData: { + encode(message: ChannelStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ChannelStateData; + fromPartial(object: DeepPartial): ChannelStateData; +}; +export declare const PacketCommitmentData: { + encode(message: PacketCommitmentData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketCommitmentData; + fromPartial(object: DeepPartial): PacketCommitmentData; +}; +export declare const PacketAcknowledgementData: { + encode(message: PacketAcknowledgementData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketAcknowledgementData; + fromPartial(object: DeepPartial): PacketAcknowledgementData; +}; +export declare const PacketReceiptAbsenceData: { + encode(message: PacketReceiptAbsenceData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketReceiptAbsenceData; + fromPartial(object: DeepPartial): PacketReceiptAbsenceData; +}; +export declare const NextSequenceRecvData: { + encode(message: NextSequenceRecvData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NextSequenceRecvData; + fromPartial(object: DeepPartial): NextSequenceRecvData; +}; diff --git a/packages/codegen/dist/ibc/lightclients/solomachine/v2/solomachine.d.ts b/packages/codegen/dist/ibc/lightclients/solomachine/v2/solomachine.d.ts new file mode 100644 index 00000000..40dd6c3a --- /dev/null +++ 
b/packages/codegen/dist/ibc/lightclients/solomachine/v2/solomachine.d.ts @@ -0,0 +1,391 @@ +/// +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { ConnectionEnd, ConnectionEndSDKType } from "../../../core/connection/v1/connection"; +import { Channel, ChannelSDKType } from "../../../core/channel/v1/channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * DataType defines the type of solo machine proof being created. This is done + * to preserve uniqueness of different data sign byte encodings. + */ +export declare enum DataType { + /** DATA_TYPE_UNINITIALIZED_UNSPECIFIED - Default State */ + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0, + /** DATA_TYPE_CLIENT_STATE - Data type for client state verification */ + DATA_TYPE_CLIENT_STATE = 1, + /** DATA_TYPE_CONSENSUS_STATE - Data type for consensus state verification */ + DATA_TYPE_CONSENSUS_STATE = 2, + /** DATA_TYPE_CONNECTION_STATE - Data type for connection state verification */ + DATA_TYPE_CONNECTION_STATE = 3, + /** DATA_TYPE_CHANNEL_STATE - Data type for channel state verification */ + DATA_TYPE_CHANNEL_STATE = 4, + /** DATA_TYPE_PACKET_COMMITMENT - Data type for packet commitment verification */ + DATA_TYPE_PACKET_COMMITMENT = 5, + /** DATA_TYPE_PACKET_ACKNOWLEDGEMENT - Data type for packet acknowledgement verification */ + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6, + /** DATA_TYPE_PACKET_RECEIPT_ABSENCE - Data type for packet receipt absence verification */ + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7, + /** DATA_TYPE_NEXT_SEQUENCE_RECV - Data type for next sequence recv verification */ + DATA_TYPE_NEXT_SEQUENCE_RECV = 8, + /** DATA_TYPE_HEADER - Data type for header verification */ + DATA_TYPE_HEADER = 9, + UNRECOGNIZED = -1 +} +export declare const DataTypeSDKType: typeof DataType; +export declare function dataTypeFromJSON(object: any): DataType; +export declare function dataTypeToJSON(object: DataType): string; +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ +export interface ClientState { + /** latest sequence of the client state */ + sequence: Long; + /** frozen sequence of the solo machine */ + isFrozen: boolean; + consensusState?: ConsensusState; + /** + * when set to true, will allow governance to update a solo machine client. + * The client will be unfrozen if it is frozen. + */ + allowUpdateAfterProposal: boolean; +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ +export interface ClientStateSDKType { + sequence: Long; + is_frozen: boolean; + consensus_state?: ConsensusStateSDKType; + allow_update_after_proposal: boolean; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ +export interface ConsensusState { + /** public key of the solo machine */ + publicKey?: Any; + /** + * diversifier allows the same public key to be re-used across different solo + * machine clients (potentially on different chains) without being considered + * misbehaviour. + */ + diversifier: string; + timestamp: Long; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. 
+ */ +export interface ConsensusStateSDKType { + public_key?: AnySDKType; + diversifier: string; + timestamp: Long; +} +/** Header defines a solo machine consensus header */ +export interface Header { + /** sequence to update solo machine public key at */ + sequence: Long; + timestamp: Long; + signature: Uint8Array; + newPublicKey?: Any; + newDiversifier: string; +} +/** Header defines a solo machine consensus header */ +export interface HeaderSDKType { + sequence: Long; + timestamp: Long; + signature: Uint8Array; + new_public_key?: AnySDKType; + new_diversifier: string; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ +export interface Misbehaviour { + clientId: string; + sequence: Long; + signatureOne?: SignatureAndData; + signatureTwo?: SignatureAndData; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ +export interface MisbehaviourSDKType { + client_id: string; + sequence: Long; + signature_one?: SignatureAndDataSDKType; + signature_two?: SignatureAndDataSDKType; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ +export interface SignatureAndData { + signature: Uint8Array; + dataType: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ +export interface SignatureAndDataSDKType { + signature: Uint8Array; + data_type: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ +export interface TimestampedSignatureData { + signatureData: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ +export interface TimestampedSignatureDataSDKType { + signature_data: Uint8Array; + timestamp: Long; +} +/** SignBytes defines the signed bytes used for signature verification. */ +export interface SignBytes { + sequence: Long; + timestamp: Long; + diversifier: string; + /** type of the data used */ + dataType: DataType; + /** marshaled data */ + data: Uint8Array; +} +/** SignBytes defines the signed bytes used for signature verification. */ +export interface SignBytesSDKType { + sequence: Long; + timestamp: Long; + diversifier: string; + data_type: DataType; + data: Uint8Array; +} +/** HeaderData returns the SignBytes data for update verification. */ +export interface HeaderData { + /** header public key */ + newPubKey?: Any; + /** header diversifier */ + newDiversifier: string; +} +/** HeaderData returns the SignBytes data for update verification. */ +export interface HeaderDataSDKType { + new_pub_key?: AnySDKType; + new_diversifier: string; +} +/** ClientStateData returns the SignBytes data for client state verification. */ +export interface ClientStateData { + path: Uint8Array; + clientState?: Any; +} +/** ClientStateData returns the SignBytes data for client state verification. */ +export interface ClientStateDataSDKType { + path: Uint8Array; + client_state?: AnySDKType; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. 
+ */ +export interface ConsensusStateData { + path: Uint8Array; + consensusState?: Any; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ +export interface ConsensusStateDataSDKType { + path: Uint8Array; + consensus_state?: AnySDKType; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ +export interface ConnectionStateData { + path: Uint8Array; + connection?: ConnectionEnd; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ +export interface ConnectionStateDataSDKType { + path: Uint8Array; + connection?: ConnectionEndSDKType; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ +export interface ChannelStateData { + path: Uint8Array; + channel?: Channel; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ +export interface ChannelStateDataSDKType { + path: Uint8Array; + channel?: ChannelSDKType; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ +export interface PacketCommitmentData { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ +export interface PacketCommitmentDataSDKType { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ +export interface PacketAcknowledgementData { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ +export interface PacketAcknowledgementDataSDKType { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ +export interface PacketReceiptAbsenceData { + path: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ +export interface PacketReceiptAbsenceDataSDKType { + path: Uint8Array; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ +export interface NextSequenceRecvData { + path: Uint8Array; + nextSeqRecv: Long; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ +export interface NextSequenceRecvDataSDKType { + path: Uint8Array; + next_seq_recv: Long; +} +export declare const ClientState: { + encode(message: ClientState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState; + fromPartial(object: DeepPartial): ClientState; +}; +export declare const ConsensusState: { + encode(message: ConsensusState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState; + fromPartial(object: DeepPartial): ConsensusState; +}; +export declare const Header: { + encode(message: Header, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Header; + fromPartial(object: DeepPartial
): Header; +}; +export declare const Misbehaviour: { + encode(message: Misbehaviour, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour; + fromPartial(object: DeepPartial): Misbehaviour; +}; +export declare const SignatureAndData: { + encode(message: SignatureAndData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureAndData; + fromPartial(object: DeepPartial): SignatureAndData; +}; +export declare const TimestampedSignatureData: { + encode(message: TimestampedSignatureData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TimestampedSignatureData; + fromPartial(object: DeepPartial): TimestampedSignatureData; +}; +export declare const SignBytes: { + encode(message: SignBytes, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignBytes; + fromPartial(object: DeepPartial): SignBytes; +}; +export declare const HeaderData: { + encode(message: HeaderData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): HeaderData; + fromPartial(object: DeepPartial): HeaderData; +}; +export declare const ClientStateData: { + encode(message: ClientStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientStateData; + fromPartial(object: DeepPartial): ClientStateData; +}; +export declare const ConsensusStateData: { + encode(message: ConsensusStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateData; + fromPartial(object: DeepPartial): ConsensusStateData; +}; +export declare const ConnectionStateData: { + encode(message: ConnectionStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionStateData; + fromPartial(object: DeepPartial): ConnectionStateData; +}; +export declare const ChannelStateData: { + encode(message: ChannelStateData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ChannelStateData; + fromPartial(object: DeepPartial): ChannelStateData; +}; +export declare const PacketCommitmentData: { + encode(message: PacketCommitmentData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketCommitmentData; + fromPartial(object: DeepPartial): PacketCommitmentData; +}; +export declare const PacketAcknowledgementData: { + encode(message: PacketAcknowledgementData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketAcknowledgementData; + fromPartial(object: DeepPartial): PacketAcknowledgementData; +}; +export declare const PacketReceiptAbsenceData: { + encode(message: PacketReceiptAbsenceData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PacketReceiptAbsenceData; + fromPartial(object: DeepPartial): PacketReceiptAbsenceData; +}; +export declare const NextSequenceRecvData: { + encode(message: NextSequenceRecvData, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NextSequenceRecvData; + fromPartial(object: DeepPartial): NextSequenceRecvData; +}; diff --git a/packages/codegen/dist/ibc/lightclients/tendermint/v1/tendermint.d.ts b/packages/codegen/dist/ibc/lightclients/tendermint/v1/tendermint.d.ts new file mode 100644 index 00000000..c460c7bc --- /dev/null +++ 
b/packages/codegen/dist/ibc/lightclients/tendermint/v1/tendermint.d.ts @@ -0,0 +1,185 @@ +/// +import { Duration, DurationSDKType } from "../../../../google/protobuf/duration"; +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import { ProofSpec, ProofSpecSDKType } from "../../../../confio/proofs"; +import { MerkleRoot, MerkleRootSDKType } from "../../../core/commitment/v1/commitment"; +import { SignedHeader, SignedHeaderSDKType } from "../../../../tendermint/types/types"; +import { ValidatorSet, ValidatorSetSDKType } from "../../../../tendermint/types/validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * ClientState from Tendermint tracks the current validator set, latest height, + * and a possible frozen height. + */ +export interface ClientState { + chainId: string; + trustLevel?: Fraction; + /** + * duration of the period since the LastestTimestamp during which the + * submitted headers are valid for upgrade + */ + trustingPeriod?: Duration; + /** duration of the staking unbonding period */ + unbondingPeriod?: Duration; + /** defines how much new (untrusted) header's Time can drift into the future. */ + maxClockDrift?: Duration; + /** Block height when the client was frozen due to a misbehaviour */ + frozenHeight?: Height; + /** Latest height the client was updated to */ + latestHeight?: Height; + /** Proof specifications used in verifying counterparty state */ + proofSpecs: ProofSpec[]; + /** + * Path at which next upgraded client will be committed. + * Each element corresponds to the key for a single CommitmentProof in the + * chained proof. NOTE: ClientState must stored under + * `{upgradePath}/{upgradeHeight}/clientState` ConsensusState must be stored + * under `{upgradepath}/{upgradeHeight}/consensusState` For SDK chains using + * the default upgrade module, upgrade_path should be []string{"upgrade", + * "upgradedIBCState"}` + */ + upgradePath: string[]; + /** + * This flag, when set to true, will allow governance to recover a client + * which has expired + */ + allowUpdateAfterExpiry: boolean; + /** + * This flag, when set to true, will allow governance to unfreeze a client + * whose chain has experienced a misbehaviour event + */ + allowUpdateAfterMisbehaviour: boolean; +} +/** + * ClientState from Tendermint tracks the current validator set, latest height, + * and a possible frozen height. + */ +export interface ClientStateSDKType { + chain_id: string; + trust_level?: FractionSDKType; + trusting_period?: DurationSDKType; + unbonding_period?: DurationSDKType; + max_clock_drift?: DurationSDKType; + frozen_height?: HeightSDKType; + latest_height?: HeightSDKType; + proof_specs: ProofSpecSDKType[]; + upgrade_path: string[]; + allow_update_after_expiry: boolean; + allow_update_after_misbehaviour: boolean; +} +/** ConsensusState defines the consensus state from Tendermint. */ +export interface ConsensusState { + /** + * timestamp that corresponds to the block height in which the ConsensusState + * was stored. + */ + timestamp?: Date; + /** commitment root (i.e app hash) */ + root?: MerkleRoot; + nextValidatorsHash: Uint8Array; +} +/** ConsensusState defines the consensus state from Tendermint. 
*/ +export interface ConsensusStateSDKType { + timestamp?: Date; + root?: MerkleRootSDKType; + next_validators_hash: Uint8Array; +} +/** + * Misbehaviour is a wrapper over two conflicting Headers + * that implements Misbehaviour interface expected by ICS-02 + */ +export interface Misbehaviour { + clientId: string; + header1?: Header; + header2?: Header; +} +/** + * Misbehaviour is a wrapper over two conflicting Headers + * that implements Misbehaviour interface expected by ICS-02 + */ +export interface MisbehaviourSDKType { + client_id: string; + header_1?: HeaderSDKType; + header_2?: HeaderSDKType; +} +/** + * Header defines the Tendermint client consensus Header. + * It encapsulates all the information necessary to update from a trusted + * Tendermint ConsensusState. The inclusion of TrustedHeight and + * TrustedValidators allows this update to process correctly, so long as the + * ConsensusState for the TrustedHeight exists, this removes race conditions + * among relayers The SignedHeader and ValidatorSet are the new untrusted update + * fields for the client. The TrustedHeight is the height of a stored + * ConsensusState on the client that will be used to verify the new untrusted + * header. The Trusted ConsensusState must be within the unbonding period of + * current time in order to correctly verify, and the TrustedValidators must + * hash to TrustedConsensusState.NextValidatorsHash since that is the last + * trusted validator set at the TrustedHeight. + */ +export interface Header { + signedHeader?: SignedHeader; + validatorSet?: ValidatorSet; + trustedHeight?: Height; + trustedValidators?: ValidatorSet; +} +/** + * Header defines the Tendermint client consensus Header. + * It encapsulates all the information necessary to update from a trusted + * Tendermint ConsensusState. The inclusion of TrustedHeight and + * TrustedValidators allows this update to process correctly, so long as the + * ConsensusState for the TrustedHeight exists, this removes race conditions + * among relayers The SignedHeader and ValidatorSet are the new untrusted update + * fields for the client. The TrustedHeight is the height of a stored + * ConsensusState on the client that will be used to verify the new untrusted + * header. The Trusted ConsensusState must be within the unbonding period of + * current time in order to correctly verify, and the TrustedValidators must + * hash to TrustedConsensusState.NextValidatorsHash since that is the last + * trusted validator set at the TrustedHeight. + */ +export interface HeaderSDKType { + signed_header?: SignedHeaderSDKType; + validator_set?: ValidatorSetSDKType; + trusted_height?: HeightSDKType; + trusted_validators?: ValidatorSetSDKType; +} +/** + * Fraction defines the protobuf message type for tmmath.Fraction that only + * supports positive values. + */ +export interface Fraction { + numerator: Long; + denominator: Long; +} +/** + * Fraction defines the protobuf message type for tmmath.Fraction that only + * supports positive values. 
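The Tendermint ClientState above is assembled from the Fraction trust level, Duration periods, and Height values. A hedged sketch with the generated codec, not part of the generated output; all values are placeholders, and the Duration ({ seconds, nanos }) and Height ({ revisionNumber, revisionHeight }) field names are assumed from the generated google.protobuf and ibc.core.client.v1 types.

// Sketch: assembling a Tendermint ClientState; every value below is a placeholder.
import { Long } from "./helpers";
import { ClientState } from "./ibc/lightclients/tendermint/v1/tendermint";

const clientState = ClientState.fromPartial({
  chainId: "example-chain-1",
  trustLevel: { numerator: Long.fromNumber(1, true), denominator: Long.fromNumber(3, true) },
  trustingPeriod: { seconds: Long.fromNumber(14 * 24 * 3600), nanos: 0 },
  unbondingPeriod: { seconds: Long.fromNumber(21 * 24 * 3600), nanos: 0 },
  maxClockDrift: { seconds: Long.fromNumber(10), nanos: 0 },
  latestHeight: { revisionNumber: Long.fromNumber(1, true), revisionHeight: Long.fromNumber(100, true) },
  upgradePath: ["upgrade", "upgradedIBCState"],
  allowUpdateAfterExpiry: false,
  allowUpdateAfterMisbehaviour: false,
});

const bytes = ClientState.encode(clientState).finish();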
+ */ +export interface FractionSDKType { + numerator: Long; + denominator: Long; +} +export declare const ClientState: { + encode(message: ClientState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState; + fromPartial(object: DeepPartial): ClientState; +}; +export declare const ConsensusState: { + encode(message: ConsensusState, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState; + fromPartial(object: DeepPartial): ConsensusState; +}; +export declare const Misbehaviour: { + encode(message: Misbehaviour, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour; + fromPartial(object: DeepPartial): Misbehaviour; +}; +export declare const Header: { + encode(message: Header, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Header; + fromPartial(object: DeepPartial
): Header; +}; +export declare const Fraction: { + encode(message: Fraction, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Fraction; + fromPartial(object: DeepPartial): Fraction; +}; diff --git a/packages/codegen/dist/ibc/rpc.query.d.ts b/packages/codegen/dist/ibc/rpc.query.d.ts new file mode 100644 index 00000000..35c7da03 --- /dev/null +++ b/packages/codegen/dist/ibc/rpc.query.d.ts @@ -0,0 +1,243 @@ +import { HttpEndpoint } from "@cosmjs/tendermint-rpc"; +export declare const createRPCQueryClient: ({ rpcEndpoint }: { + rpcEndpoint: string | HttpEndpoint; +}) => Promise<{ + cosmos: { + app: { + v1alpha1: { + config(request?: import("../cosmos/app/v1alpha1/query").QueryConfigRequest): Promise; + }; + }; + auth: { + v1beta1: { + accounts(request?: import("../cosmos/auth/v1beta1/query").QueryAccountsRequest): Promise; + account(request: import("../cosmos/auth/v1beta1/query").QueryAccountRequest): Promise; + params(request?: import("../cosmos/auth/v1beta1/query").QueryParamsRequest): Promise; + moduleAccounts(request?: import("../cosmos/auth/v1beta1/query").QueryModuleAccountsRequest): Promise; + bech32Prefix(request?: import("../cosmos/auth/v1beta1/query").Bech32PrefixRequest): Promise; + addressBytesToString(request: import("../cosmos/auth/v1beta1/query").AddressBytesToStringRequest): Promise; + addressStringToBytes(request: import("../cosmos/auth/v1beta1/query").AddressStringToBytesRequest): Promise; + }; + }; + authz: { + v1beta1: { + grants(request: import("../cosmos/authz/v1beta1/query").QueryGrantsRequest): Promise; + granterGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranterGrantsRequest): Promise; + granteeGrants(request: import("../cosmos/authz/v1beta1/query").QueryGranteeGrantsRequest): Promise; + }; + }; + bank: { + v1beta1: { + balance(request: import("../cosmos/bank/v1beta1/query").QueryBalanceRequest): Promise; + allBalances(request: import("../cosmos/bank/v1beta1/query").QueryAllBalancesRequest): Promise; + spendableBalances(request: import("../cosmos/bank/v1beta1/query").QuerySpendableBalancesRequest): Promise; + totalSupply(request?: import("../cosmos/bank/v1beta1/query").QueryTotalSupplyRequest): Promise; + supplyOf(request: import("../cosmos/bank/v1beta1/query").QuerySupplyOfRequest): Promise; + params(request?: import("../cosmos/bank/v1beta1/query").QueryParamsRequest): Promise; + denomMetadata(request: import("../cosmos/bank/v1beta1/query").QueryDenomMetadataRequest): Promise; + denomsMetadata(request?: import("../cosmos/bank/v1beta1/query").QueryDenomsMetadataRequest): Promise; + denomOwners(request: import("../cosmos/bank/v1beta1/query").QueryDenomOwnersRequest): Promise; + }; + }; + base: { + tendermint: { + v1beta1: { + getNodeInfo(request?: import("../cosmos/base/tendermint/v1beta1/query").GetNodeInfoRequest): Promise; + getSyncing(request?: import("../cosmos/base/tendermint/v1beta1/query").GetSyncingRequest): Promise; + getLatestBlock(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestBlockRequest): Promise; + getBlockByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetBlockByHeightRequest): Promise; + getLatestValidatorSet(request?: import("../cosmos/base/tendermint/v1beta1/query").GetLatestValidatorSetRequest): Promise; + getValidatorSetByHeight(request: import("../cosmos/base/tendermint/v1beta1/query").GetValidatorSetByHeightRequest): Promise; + }; + }; + }; + distribution: { + v1beta1: { + params(request?: 
import("../cosmos/distribution/v1beta1/query").QueryParamsRequest): Promise; + validatorOutstandingRewards(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorOutstandingRewardsRequest): Promise; + validatorCommission(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorCommissionRequest): Promise; + validatorSlashes(request: import("../cosmos/distribution/v1beta1/query").QueryValidatorSlashesRequest): Promise; + delegationRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationRewardsRequest): Promise; + delegationTotalRewards(request: import("../cosmos/distribution/v1beta1/query").QueryDelegationTotalRewardsRequest): Promise; + delegatorValidators(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorValidatorsRequest): Promise; + delegatorWithdrawAddress(request: import("../cosmos/distribution/v1beta1/query").QueryDelegatorWithdrawAddressRequest): Promise; + communityPool(request?: import("../cosmos/distribution/v1beta1/query").QueryCommunityPoolRequest): Promise; + }; + }; + evidence: { + v1beta1: { + evidence(request: import("../cosmos/evidence/v1beta1/query").QueryEvidenceRequest): Promise; + allEvidence(request?: import("../cosmos/evidence/v1beta1/query").QueryAllEvidenceRequest): Promise; + }; + }; + feegrant: { + v1beta1: { + allowance(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowanceRequest): Promise; + allowances(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesRequest): Promise; + allowancesByGranter(request: import("../cosmos/feegrant/v1beta1/query").QueryAllowancesByGranterRequest): Promise; + }; + }; + gov: { + v1: { + proposal(request: import("../cosmos/gov/v1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1/query").QueryTallyResultRequest): Promise; + }; + v1beta1: { + proposal(request: import("../cosmos/gov/v1beta1/query").QueryProposalRequest): Promise; + proposals(request: import("../cosmos/gov/v1beta1/query").QueryProposalsRequest): Promise; + vote(request: import("../cosmos/gov/v1beta1/query").QueryVoteRequest): Promise; + votes(request: import("../cosmos/gov/v1beta1/query").QueryVotesRequest): Promise; + params(request: import("../cosmos/gov/v1beta1/query").QueryParamsRequest): Promise; + deposit(request: import("../cosmos/gov/v1beta1/query").QueryDepositRequest): Promise; + deposits(request: import("../cosmos/gov/v1beta1/query").QueryDepositsRequest): Promise; + tallyResult(request: import("../cosmos/gov/v1beta1/query").QueryTallyResultRequest): Promise; + }; + }; + group: { + v1: { + groupInfo(request: import("../cosmos/group/v1/query").QueryGroupInfoRequest): Promise; + groupPolicyInfo(request: import("../cosmos/group/v1/query").QueryGroupPolicyInfoRequest): Promise; + groupMembers(request: import("../cosmos/group/v1/query").QueryGroupMembersRequest): Promise; + groupsByAdmin(request: import("../cosmos/group/v1/query").QueryGroupsByAdminRequest): Promise; + groupPoliciesByGroup(request: 
import("../cosmos/group/v1/query").QueryGroupPoliciesByGroupRequest): Promise; + groupPoliciesByAdmin(request: import("../cosmos/group/v1/query").QueryGroupPoliciesByAdminRequest): Promise; + proposal(request: import("../cosmos/group/v1/query").QueryProposalRequest): Promise; + proposalsByGroupPolicy(request: import("../cosmos/group/v1/query").QueryProposalsByGroupPolicyRequest): Promise; + voteByProposalVoter(request: import("../cosmos/group/v1/query").QueryVoteByProposalVoterRequest): Promise; + votesByProposal(request: import("../cosmos/group/v1/query").QueryVotesByProposalRequest): Promise; + votesByVoter(request: import("../cosmos/group/v1/query").QueryVotesByVoterRequest): Promise; + groupsByMember(request: import("../cosmos/group/v1/query").QueryGroupsByMemberRequest): Promise; + tallyResult(request: import("../cosmos/group/v1/query").QueryTallyResultRequest): Promise; + }; + }; + mint: { + v1beta1: { + params(request?: import("../cosmos/mint/v1beta1/query").QueryParamsRequest): Promise; + inflation(request?: import("../cosmos/mint/v1beta1/query").QueryInflationRequest): Promise; + annualProvisions(request?: import("../cosmos/mint/v1beta1/query").QueryAnnualProvisionsRequest): Promise; + }; + }; + nft: { + v1beta1: { + balance(request: import("../cosmos/nft/v1beta1/query").QueryBalanceRequest): Promise; + owner(request: import("../cosmos/nft/v1beta1/query").QueryOwnerRequest): Promise; + supply(request: import("../cosmos/nft/v1beta1/query").QuerySupplyRequest): Promise; + nFTs(request: import("../cosmos/nft/v1beta1/query").QueryNFTsRequest): Promise; + nFT(request: import("../cosmos/nft/v1beta1/query").QueryNFTRequest): Promise; + class(request: import("../cosmos/nft/v1beta1/query").QueryClassRequest): Promise; + classes(request?: import("../cosmos/nft/v1beta1/query").QueryClassesRequest): Promise; + }; + }; + params: { + v1beta1: { + params(request: import("../cosmos/params/v1beta1/query").QueryParamsRequest): Promise; + subspaces(request?: import("../cosmos/params/v1beta1/query").QuerySubspacesRequest): Promise; + }; + }; + slashing: { + v1beta1: { + params(request?: import("../cosmos/slashing/v1beta1/query").QueryParamsRequest): Promise; + signingInfo(request: import("../cosmos/slashing/v1beta1/query").QuerySigningInfoRequest): Promise; + signingInfos(request?: import("../cosmos/slashing/v1beta1/query").QuerySigningInfosRequest): Promise; + }; + }; + staking: { + v1beta1: { + validators(request: import("../cosmos/staking/v1beta1/query").QueryValidatorsRequest): Promise; + validator(request: import("../cosmos/staking/v1beta1/query").QueryValidatorRequest): Promise; + validatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorDelegationsRequest): Promise; + validatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryValidatorUnbondingDelegationsRequest): Promise; + delegation(request: import("../cosmos/staking/v1beta1/query").QueryDelegationRequest): Promise; + unbondingDelegation(request: import("../cosmos/staking/v1beta1/query").QueryUnbondingDelegationRequest): Promise; + delegatorDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorDelegationsRequest): Promise; + delegatorUnbondingDelegations(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorUnbondingDelegationsRequest): Promise; + redelegations(request: import("../cosmos/staking/v1beta1/query").QueryRedelegationsRequest): Promise; + delegatorValidators(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorsRequest): 
Promise; + delegatorValidator(request: import("../cosmos/staking/v1beta1/query").QueryDelegatorValidatorRequest): Promise; + historicalInfo(request: import("../cosmos/staking/v1beta1/query").QueryHistoricalInfoRequest): Promise; + pool(request?: import("../cosmos/staking/v1beta1/query").QueryPoolRequest): Promise; + params(request?: import("../cosmos/staking/v1beta1/query").QueryParamsRequest): Promise; + }; + }; + tx: { + v1beta1: { + simulate(request: import("../cosmos/tx/v1beta1/service").SimulateRequest): Promise; + getTx(request: import("../cosmos/tx/v1beta1/service").GetTxRequest): Promise; + broadcastTx(request: import("../cosmos/tx/v1beta1/service").BroadcastTxRequest): Promise; + getTxsEvent(request: import("../cosmos/tx/v1beta1/service").GetTxsEventRequest): Promise; + getBlockWithTxs(request: import("../cosmos/tx/v1beta1/service").GetBlockWithTxsRequest): Promise; + }; + }; + upgrade: { + v1beta1: { + currentPlan(request?: import("../cosmos/upgrade/v1beta1/query").QueryCurrentPlanRequest): Promise; + appliedPlan(request: import("../cosmos/upgrade/v1beta1/query").QueryAppliedPlanRequest): Promise; + upgradedConsensusState(request: import("../cosmos/upgrade/v1beta1/query").QueryUpgradedConsensusStateRequest): Promise; + moduleVersions(request: import("../cosmos/upgrade/v1beta1/query").QueryModuleVersionsRequest): Promise; + authority(request?: import("../cosmos/upgrade/v1beta1/query").QueryAuthorityRequest): Promise; + }; + }; + }; + ibc: { + applications: { + transfer: { + v1: { + denomTrace(request: import("./applications/transfer/v1/query").QueryDenomTraceRequest): Promise; + denomTraces(request?: import("./applications/transfer/v1/query").QueryDenomTracesRequest): Promise; + params(request?: import("./applications/transfer/v1/query").QueryParamsRequest): Promise; + }; + }; + }; + core: { + channel: { + v1: { + channel(request: import("./core/channel/v1/query").QueryChannelRequest): Promise; + channels(request?: import("./core/channel/v1/query").QueryChannelsRequest): Promise; + connectionChannels(request: import("./core/channel/v1/query").QueryConnectionChannelsRequest): Promise; + channelClientState(request: import("./core/channel/v1/query").QueryChannelClientStateRequest): Promise; + channelConsensusState(request: import("./core/channel/v1/query").QueryChannelConsensusStateRequest): Promise; + packetCommitment(request: import("./core/channel/v1/query").QueryPacketCommitmentRequest): Promise; + packetCommitments(request: import("./core/channel/v1/query").QueryPacketCommitmentsRequest): Promise; + packetReceipt(request: import("./core/channel/v1/query").QueryPacketReceiptRequest): Promise; + packetAcknowledgement(request: import("./core/channel/v1/query").QueryPacketAcknowledgementRequest): Promise; + packetAcknowledgements(request: import("./core/channel/v1/query").QueryPacketAcknowledgementsRequest): Promise; + unreceivedPackets(request: import("./core/channel/v1/query").QueryUnreceivedPacketsRequest): Promise; + unreceivedAcks(request: import("./core/channel/v1/query").QueryUnreceivedAcksRequest): Promise; + nextSequenceReceive(request: import("./core/channel/v1/query").QueryNextSequenceReceiveRequest): Promise; + }; + }; + client: { + v1: { + clientState(request: import("./core/client/v1/query").QueryClientStateRequest): Promise; + clientStates(request?: import("./core/client/v1/query").QueryClientStatesRequest): Promise; + consensusState(request: import("./core/client/v1/query").QueryConsensusStateRequest): Promise; + consensusStates(request: 
import("./core/client/v1/query").QueryConsensusStatesRequest): Promise; + clientStatus(request: import("./core/client/v1/query").QueryClientStatusRequest): Promise; + clientParams(request?: import("./core/client/v1/query").QueryClientParamsRequest): Promise; + upgradedClientState(request?: import("./core/client/v1/query").QueryUpgradedClientStateRequest): Promise; + upgradedConsensusState(request?: import("./core/client/v1/query").QueryUpgradedConsensusStateRequest): Promise; + }; + }; + connection: { + v1: { + connection(request: import("./core/connection/v1/query").QueryConnectionRequest): Promise; + connections(request?: import("./core/connection/v1/query").QueryConnectionsRequest): Promise; + clientConnections(request: import("./core/connection/v1/query").QueryClientConnectionsRequest): Promise; + connectionClientState(request: import("./core/connection/v1/query").QueryConnectionClientStateRequest): Promise; + connectionConsensusState(request: import("./core/connection/v1/query").QueryConnectionConsensusStateRequest): Promise; + }; + }; + port: { + v1: { + appVersion(request: import("./core/port/v1/query").QueryAppVersionRequest): Promise; + }; + }; + }; + }; +}>; diff --git a/packages/codegen/dist/ibc/rpc.tx.d.ts b/packages/codegen/dist/ibc/rpc.tx.d.ts new file mode 100644 index 00000000..f7d15613 --- /dev/null +++ b/packages/codegen/dist/ibc/rpc.tx.d.ts @@ -0,0 +1,65 @@ +import { Rpc } from "../helpers"; +export declare const createRPCMsgClient: ({ rpc }: { + rpc: Rpc; +}) => Promise<{ + cosmos: { + authz: { + v1beta1: import("../cosmos/authz/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + bank: { + v1beta1: import("../cosmos/bank/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + crisis: { + v1beta1: import("../cosmos/crisis/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + distribution: { + v1beta1: import("../cosmos/distribution/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + evidence: { + v1beta1: import("../cosmos/evidence/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + feegrant: { + v1beta1: import("../cosmos/feegrant/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + gov: { + v1: import("../cosmos/gov/v1/tx.rpc.msg").MsgClientImpl; + v1beta1: import("../cosmos/gov/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + group: { + v1: import("../cosmos/group/v1/tx.rpc.msg").MsgClientImpl; + }; + nft: { + v1beta1: import("../cosmos/nft/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + slashing: { + v1beta1: import("../cosmos/slashing/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + staking: { + v1beta1: import("../cosmos/staking/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + upgrade: { + v1beta1: import("../cosmos/upgrade/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + vesting: { + v1beta1: import("../cosmos/vesting/v1beta1/tx.rpc.msg").MsgClientImpl; + }; + }; + ibc: { + applications: { + transfer: { + v1: import("./applications/transfer/v1/tx.rpc.msg").MsgClientImpl; + }; + }; + core: { + channel: { + v1: import("./core/channel/v1/tx.rpc.msg").MsgClientImpl; + }; + client: { + v1: import("./core/client/v1/tx.rpc.msg").MsgClientImpl; + }; + connection: { + v1: import("./core/connection/v1/tx.rpc.msg").MsgClientImpl; + }; + }; + }; +}>; diff --git a/packages/codegen/dist/ics23/bundle.d.ts b/packages/codegen/dist/ics23/bundle.d.ts new file mode 100644 index 00000000..5793c757 --- /dev/null +++ b/packages/codegen/dist/ics23/bundle.d.ts @@ -0,0 +1,545 @@ +import * as _1 from "../confio/proofs"; +export declare const ics23: { + hashOpFromJSON(object: any): _1.HashOp; + hashOpToJSON(object: _1.HashOp): string; + lengthOpFromJSON(object: any): _1.LengthOp; 
+ lengthOpToJSON(object: _1.LengthOp): string; + HashOp: typeof _1.HashOp; + HashOpSDKType: typeof _1.HashOp; + LengthOp: typeof _1.LengthOp; + LengthOpSDKType: typeof _1.LengthOp; + ExistenceProof: { + encode(message: _1.ExistenceProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.ExistenceProof; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }): _1.ExistenceProof; + }; + NonExistenceProof: { + encode(message: _1.NonExistenceProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.NonExistenceProof; + fromPartial(object: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }): _1.NonExistenceProof; + }; + CommitmentProof: { + encode(message: _1.CommitmentProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.CommitmentProof; + fromPartial(object: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + batch?: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + 
prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + }[]; + }; + compressed?: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + }; + }[]; + lookupInners?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }): _1.CommitmentProof; + }; + LeafOp: { + encode(message: _1.LeafOp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.LeafOp; + fromPartial(object: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }): _1.LeafOp; + }; + InnerOp: { + encode(message: _1.InnerOp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.InnerOp; + fromPartial(object: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }): _1.InnerOp; + }; + ProofSpec: { + encode(message: _1.ProofSpec, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.ProofSpec; + fromPartial(object: { + leafSpec?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + innerSpec?: { + childOrder?: number[]; + childSize?: number; + minPrefixLength?: number; + maxPrefixLength?: number; + emptyChild?: Uint8Array; + hash?: _1.HashOp; + }; + maxDepth?: number; + minDepth?: number; + }): _1.ProofSpec; + }; + InnerSpec: { + encode(message: _1.InnerSpec, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.InnerSpec; + fromPartial(object: { + childOrder?: number[]; + childSize?: number; + minPrefixLength?: number; + maxPrefixLength?: number; + emptyChild?: Uint8Array; + hash?: _1.HashOp; + }): _1.InnerSpec; + }; + BatchProof: { + encode(message: _1.BatchProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.BatchProof; + fromPartial(object: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: 
_1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + }[]; + }): _1.BatchProof; + }; + BatchEntry: { + encode(message: _1.BatchEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.BatchEntry; + fromPartial(object: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }; + }; + }): _1.BatchEntry; + }; + CompressedBatchProof: { + encode(message: _1.CompressedBatchProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.CompressedBatchProof; + fromPartial(object: { + entries?: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + }; + }[]; + lookupInners?: { + hash?: _1.HashOp; + prefix?: Uint8Array; + suffix?: Uint8Array; + }[]; + }): _1.CompressedBatchProof; + }; + CompressedBatchEntry: { + encode(message: _1.CompressedBatchEntry, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.CompressedBatchEntry; + fromPartial(object: { + exist?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + nonexist?: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + }; + }): _1.CompressedBatchEntry; + }; + CompressedExistenceProof: { + encode(message: _1.CompressedExistenceProof, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.CompressedExistenceProof; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }): _1.CompressedExistenceProof; + }; + CompressedNonExistenceProof: { + encode(message: _1.CompressedNonExistenceProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _1.CompressedNonExistenceProof; + fromPartial(object: { + key?: Uint8Array; + left?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + right?: { + key?: Uint8Array; + value?: Uint8Array; + leaf?: { + hash?: _1.HashOp; + prehashKey?: _1.HashOp; + prehashValue?: _1.HashOp; + length?: _1.LengthOp; + prefix?: Uint8Array; + }; + path?: number[]; + }; + }): _1.CompressedNonExistenceProof; + }; +}; diff --git a/packages/codegen/dist/index.d.ts b/packages/codegen/dist/index.d.ts new file mode 100644 index 00000000..5e348be3 --- /dev/null +++ b/packages/codegen/dist/index.d.ts @@ -0,0 +1,14 @@ +/** + * This file and any referenced files were automatically generated by @osmonauts/telescope@0.88.2 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ +export * from "./amino/bundle"; +export * from "./ics23/bundle"; +export * from "./cosmos_proto/bundle"; +export * from "./cosmos/bundle"; +export * from "./cosmwasm/bundle"; +export * from "./gogoproto/bundle"; +export * from "./google/bundle"; +export * from "./ibc/bundle"; +export * from "./tendermint/bundle"; diff --git a/packages/codegen/dist/tendermint/abci/types.d.ts b/packages/codegen/dist/tendermint/abci/types.d.ts new file mode 100644 index 00000000..8ecf55d7 --- /dev/null +++ b/packages/codegen/dist/tendermint/abci/types.d.ts @@ -0,0 +1,833 @@ +/// +import { Header, HeaderSDKType } from "../types/types"; +import { ProofOps, ProofOpsSDKType } from "../crypto/proof"; +import { EvidenceParams, EvidenceParamsSDKType, ValidatorParams, ValidatorParamsSDKType, VersionParams, VersionParamsSDKType } from "../types/params"; +import { PublicKey, PublicKeySDKType } from "../crypto/keys"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +export declare enum CheckTxType { + NEW = 0, + RECHECK = 1, + UNRECOGNIZED = -1 +} +export declare const CheckTxTypeSDKType: typeof CheckTxType; +export declare function checkTxTypeFromJSON(object: any): CheckTxType; +export declare function checkTxTypeToJSON(object: CheckTxType): string; +export declare enum ResponseOfferSnapshot_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + /** ACCEPT - Snapshot accepted, apply chunks */ + ACCEPT = 1, + /** ABORT - Abort all snapshot restoration */ + ABORT = 2, + /** REJECT - Reject this specific snapshot, try others */ + REJECT = 3, + /** REJECT_FORMAT - Reject all snapshots of this format, try others */ + REJECT_FORMAT = 4, + /** REJECT_SENDER - Reject all snapshots from the sender(s), try others */ + REJECT_SENDER = 5, + UNRECOGNIZED = -1 +} +export declare const ResponseOfferSnapshot_ResultSDKType: typeof 
ResponseOfferSnapshot_Result; +export declare function responseOfferSnapshot_ResultFromJSON(object: any): ResponseOfferSnapshot_Result; +export declare function responseOfferSnapshot_ResultToJSON(object: ResponseOfferSnapshot_Result): string; +export declare enum ResponseApplySnapshotChunk_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + /** ACCEPT - Chunk successfully accepted */ + ACCEPT = 1, + /** ABORT - Abort all snapshot restoration */ + ABORT = 2, + /** RETRY - Retry chunk (combine with refetch and reject) */ + RETRY = 3, + /** RETRY_SNAPSHOT - Retry snapshot (combine with refetch and reject) */ + RETRY_SNAPSHOT = 4, + /** REJECT_SNAPSHOT - Reject this snapshot, try others */ + REJECT_SNAPSHOT = 5, + UNRECOGNIZED = -1 +} +export declare const ResponseApplySnapshotChunk_ResultSDKType: typeof ResponseApplySnapshotChunk_Result; +export declare function responseApplySnapshotChunk_ResultFromJSON(object: any): ResponseApplySnapshotChunk_Result; +export declare function responseApplySnapshotChunk_ResultToJSON(object: ResponseApplySnapshotChunk_Result): string; +export declare enum EvidenceType { + UNKNOWN = 0, + DUPLICATE_VOTE = 1, + LIGHT_CLIENT_ATTACK = 2, + UNRECOGNIZED = -1 +} +export declare const EvidenceTypeSDKType: typeof EvidenceType; +export declare function evidenceTypeFromJSON(object: any): EvidenceType; +export declare function evidenceTypeToJSON(object: EvidenceType): string; +export interface Request { + echo?: RequestEcho; + flush?: RequestFlush; + info?: RequestInfo; + setOption?: RequestSetOption; + initChain?: RequestInitChain; + query?: RequestQuery; + beginBlock?: RequestBeginBlock; + checkTx?: RequestCheckTx; + deliverTx?: RequestDeliverTx; + endBlock?: RequestEndBlock; + commit?: RequestCommit; + listSnapshots?: RequestListSnapshots; + offerSnapshot?: RequestOfferSnapshot; + loadSnapshotChunk?: RequestLoadSnapshotChunk; + applySnapshotChunk?: RequestApplySnapshotChunk; +} +export interface RequestSDKType { + echo?: RequestEchoSDKType; + flush?: RequestFlushSDKType; + info?: RequestInfoSDKType; + set_option?: RequestSetOptionSDKType; + init_chain?: RequestInitChainSDKType; + query?: RequestQuerySDKType; + begin_block?: RequestBeginBlockSDKType; + check_tx?: RequestCheckTxSDKType; + deliver_tx?: RequestDeliverTxSDKType; + end_block?: RequestEndBlockSDKType; + commit?: RequestCommitSDKType; + list_snapshots?: RequestListSnapshotsSDKType; + offer_snapshot?: RequestOfferSnapshotSDKType; + load_snapshot_chunk?: RequestLoadSnapshotChunkSDKType; + apply_snapshot_chunk?: RequestApplySnapshotChunkSDKType; +} +export interface RequestEcho { + message: string; +} +export interface RequestEchoSDKType { + message: string; +} +export interface RequestFlush { +} +export interface RequestFlushSDKType { +} +export interface RequestInfo { + version: string; + blockVersion: Long; + p2pVersion: Long; +} +export interface RequestInfoSDKType { + version: string; + block_version: Long; + p2p_version: Long; +} +/** nondeterministic */ +export interface RequestSetOption { + key: string; + value: string; +} +/** nondeterministic */ +export interface RequestSetOptionSDKType { + key: string; + value: string; +} +export interface RequestInitChain { + time?: Date; + chainId: string; + consensusParams?: ConsensusParams; + validators: ValidatorUpdate[]; + appStateBytes: Uint8Array; + initialHeight: Long; +} +export interface RequestInitChainSDKType { + time?: Date; + chain_id: string; + consensus_params?: ConsensusParamsSDKType; + validators: 
ValidatorUpdateSDKType[]; + app_state_bytes: Uint8Array; + initial_height: Long; +} +export interface RequestQuery { + data: Uint8Array; + path: string; + height: Long; + prove: boolean; +} +export interface RequestQuerySDKType { + data: Uint8Array; + path: string; + height: Long; + prove: boolean; +} +export interface RequestBeginBlock { + hash: Uint8Array; + header?: Header; + lastCommitInfo?: LastCommitInfo; + byzantineValidators: Evidence[]; +} +export interface RequestBeginBlockSDKType { + hash: Uint8Array; + header?: HeaderSDKType; + last_commit_info?: LastCommitInfoSDKType; + byzantine_validators: EvidenceSDKType[]; +} +export interface RequestCheckTx { + tx: Uint8Array; + type: CheckTxType; +} +export interface RequestCheckTxSDKType { + tx: Uint8Array; + type: CheckTxType; +} +export interface RequestDeliverTx { + tx: Uint8Array; +} +export interface RequestDeliverTxSDKType { + tx: Uint8Array; +} +export interface RequestEndBlock { + height: Long; +} +export interface RequestEndBlockSDKType { + height: Long; +} +export interface RequestCommit { +} +export interface RequestCommitSDKType { +} +/** lists available snapshots */ +export interface RequestListSnapshots { +} +/** lists available snapshots */ +export interface RequestListSnapshotsSDKType { +} +/** offers a snapshot to the application */ +export interface RequestOfferSnapshot { + /** snapshot offered by peers */ + snapshot?: Snapshot; + /** light client-verified app hash for snapshot height */ + appHash: Uint8Array; +} +/** offers a snapshot to the application */ +export interface RequestOfferSnapshotSDKType { + snapshot?: SnapshotSDKType; + app_hash: Uint8Array; +} +/** loads a snapshot chunk */ +export interface RequestLoadSnapshotChunk { + height: Long; + format: number; + chunk: number; +} +/** loads a snapshot chunk */ +export interface RequestLoadSnapshotChunkSDKType { + height: Long; + format: number; + chunk: number; +} +/** Applies a snapshot chunk */ +export interface RequestApplySnapshotChunk { + index: number; + chunk: Uint8Array; + sender: string; +} +/** Applies a snapshot chunk */ +export interface RequestApplySnapshotChunkSDKType { + index: number; + chunk: Uint8Array; + sender: string; +} +export interface Response { + exception?: ResponseException; + echo?: ResponseEcho; + flush?: ResponseFlush; + info?: ResponseInfo; + setOption?: ResponseSetOption; + initChain?: ResponseInitChain; + query?: ResponseQuery; + beginBlock?: ResponseBeginBlock; + checkTx?: ResponseCheckTx; + deliverTx?: ResponseDeliverTx; + endBlock?: ResponseEndBlock; + commit?: ResponseCommit; + listSnapshots?: ResponseListSnapshots; + offerSnapshot?: ResponseOfferSnapshot; + loadSnapshotChunk?: ResponseLoadSnapshotChunk; + applySnapshotChunk?: ResponseApplySnapshotChunk; +} +export interface ResponseSDKType { + exception?: ResponseExceptionSDKType; + echo?: ResponseEchoSDKType; + flush?: ResponseFlushSDKType; + info?: ResponseInfoSDKType; + set_option?: ResponseSetOptionSDKType; + init_chain?: ResponseInitChainSDKType; + query?: ResponseQuerySDKType; + begin_block?: ResponseBeginBlockSDKType; + check_tx?: ResponseCheckTxSDKType; + deliver_tx?: ResponseDeliverTxSDKType; + end_block?: ResponseEndBlockSDKType; + commit?: ResponseCommitSDKType; + list_snapshots?: ResponseListSnapshotsSDKType; + offer_snapshot?: ResponseOfferSnapshotSDKType; + load_snapshot_chunk?: ResponseLoadSnapshotChunkSDKType; + apply_snapshot_chunk?: ResponseApplySnapshotChunkSDKType; +} +/** nondeterministic */ +export interface ResponseException { + error: string; +} 
+/** nondeterministic */ +export interface ResponseExceptionSDKType { + error: string; +} +export interface ResponseEcho { + message: string; +} +export interface ResponseEchoSDKType { + message: string; +} +export interface ResponseFlush { +} +export interface ResponseFlushSDKType { +} +export interface ResponseInfo { + data: string; + version: string; + appVersion: Long; + lastBlockHeight: Long; + lastBlockAppHash: Uint8Array; +} +export interface ResponseInfoSDKType { + data: string; + version: string; + app_version: Long; + last_block_height: Long; + last_block_app_hash: Uint8Array; +} +/** nondeterministic */ +export interface ResponseSetOption { + code: number; + /** bytes data = 2; */ + log: string; + info: string; +} +/** nondeterministic */ +export interface ResponseSetOptionSDKType { + code: number; + log: string; + info: string; +} +export interface ResponseInitChain { + consensusParams?: ConsensusParams; + validators: ValidatorUpdate[]; + appHash: Uint8Array; +} +export interface ResponseInitChainSDKType { + consensus_params?: ConsensusParamsSDKType; + validators: ValidatorUpdateSDKType[]; + app_hash: Uint8Array; +} +export interface ResponseQuery { + code: number; + /** bytes data = 2; // use "value" instead. */ + log: string; + /** nondeterministic */ + info: string; + index: Long; + key: Uint8Array; + value: Uint8Array; + proofOps?: ProofOps; + height: Long; + codespace: string; +} +export interface ResponseQuerySDKType { + code: number; + log: string; + info: string; + index: Long; + key: Uint8Array; + value: Uint8Array; + proof_ops?: ProofOpsSDKType; + height: Long; + codespace: string; +} +export interface ResponseBeginBlock { + events: Event[]; +} +export interface ResponseBeginBlockSDKType { + events: EventSDKType[]; +} +export interface ResponseCheckTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + log: string; + /** nondeterministic */ + info: string; + gasWanted: Long; + gasUsed: Long; + events: Event[]; + codespace: string; +} +export interface ResponseCheckTxSDKType { + code: number; + data: Uint8Array; + log: string; + info: string; + gas_wanted: Long; + gas_used: Long; + events: EventSDKType[]; + codespace: string; +} +export interface ResponseDeliverTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + log: string; + /** nondeterministic */ + info: string; + gasWanted: Long; + gasUsed: Long; + events: Event[]; + codespace: string; +} +export interface ResponseDeliverTxSDKType { + code: number; + data: Uint8Array; + log: string; + info: string; + gas_wanted: Long; + gas_used: Long; + events: EventSDKType[]; + codespace: string; +} +export interface ResponseEndBlock { + validatorUpdates: ValidatorUpdate[]; + consensusParamUpdates?: ConsensusParams; + events: Event[]; +} +export interface ResponseEndBlockSDKType { + validator_updates: ValidatorUpdateSDKType[]; + consensus_param_updates?: ConsensusParamsSDKType; + events: EventSDKType[]; +} +export interface ResponseCommit { + /** reserve 1 */ + data: Uint8Array; + retainHeight: Long; +} +export interface ResponseCommitSDKType { + data: Uint8Array; + retain_height: Long; +} +export interface ResponseListSnapshots { + snapshots: Snapshot[]; +} +export interface ResponseListSnapshotsSDKType { + snapshots: SnapshotSDKType[]; +} +export interface ResponseOfferSnapshot { + result: ResponseOfferSnapshot_Result; +} +export interface ResponseOfferSnapshotSDKType { + result: ResponseOfferSnapshot_Result; +} +export interface ResponseLoadSnapshotChunk { + chunk: Uint8Array; +} +export 
interface ResponseLoadSnapshotChunkSDKType { + chunk: Uint8Array; +} +export interface ResponseApplySnapshotChunk { + result: ResponseApplySnapshotChunk_Result; + /** Chunks to refetch and reapply */ + refetchChunks: number[]; + /** Chunk senders to reject and ban */ + rejectSenders: string[]; +} +export interface ResponseApplySnapshotChunkSDKType { + result: ResponseApplySnapshotChunk_Result; + refetch_chunks: number[]; + reject_senders: string[]; +} +/** + * ConsensusParams contains all consensus-relevant parameters + * that can be adjusted by the abci app + */ +export interface ConsensusParams { + block?: BlockParams; + evidence?: EvidenceParams; + validator?: ValidatorParams; + version?: VersionParams; +} +/** + * ConsensusParams contains all consensus-relevant parameters + * that can be adjusted by the abci app + */ +export interface ConsensusParamsSDKType { + block?: BlockParamsSDKType; + evidence?: EvidenceParamsSDKType; + validator?: ValidatorParamsSDKType; + version?: VersionParamsSDKType; +} +/** BlockParams contains limits on the block size. */ +export interface BlockParams { + /** Note: must be greater than 0 */ + maxBytes: Long; + /** Note: must be greater or equal to -1 */ + maxGas: Long; +} +/** BlockParams contains limits on the block size. */ +export interface BlockParamsSDKType { + max_bytes: Long; + max_gas: Long; +} +export interface LastCommitInfo { + round: number; + votes: VoteInfo[]; +} +export interface LastCommitInfoSDKType { + round: number; + votes: VoteInfoSDKType[]; +} +/** + * Event allows application developers to attach additional information to + * ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. + * Later, transactions may be queried using these events. + */ +export interface Event { + type: string; + attributes: EventAttribute[]; +} +/** + * Event allows application developers to attach additional information to + * ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. + * Later, transactions may be queried using these events. + */ +export interface EventSDKType { + type: string; + attributes: EventAttributeSDKType[]; +} +/** EventAttribute is a single key-value pair, associated with an event. */ +export interface EventAttribute { + key: Uint8Array; + value: Uint8Array; + /** nondeterministic */ + index: boolean; +} +/** EventAttribute is a single key-value pair, associated with an event. */ +export interface EventAttributeSDKType { + key: Uint8Array; + value: Uint8Array; + index: boolean; +} +/** + * TxResult contains results of executing the transaction. + * + * One usage is indexing transaction results. + */ +export interface TxResult { + height: Long; + index: number; + tx: Uint8Array; + result?: ResponseDeliverTx; +} +/** + * TxResult contains results of executing the transaction. + * + * One usage is indexing transaction results. 
+ */ +export interface TxResultSDKType { + height: Long; + index: number; + tx: Uint8Array; + result?: ResponseDeliverTxSDKType; +} +/** Validator */ +export interface Validator { + /** + * The first 20 bytes of SHA256(public key) + * PubKey pub_key = 2 [(gogoproto.nullable)=false]; + */ + address: Uint8Array; + /** The voting power */ + power: Long; +} +/** Validator */ +export interface ValidatorSDKType { + address: Uint8Array; + power: Long; +} +/** ValidatorUpdate */ +export interface ValidatorUpdate { + pubKey?: PublicKey; + power: Long; +} +/** ValidatorUpdate */ +export interface ValidatorUpdateSDKType { + pub_key?: PublicKeySDKType; + power: Long; +} +/** VoteInfo */ +export interface VoteInfo { + validator?: Validator; + signedLastBlock: boolean; +} +/** VoteInfo */ +export interface VoteInfoSDKType { + validator?: ValidatorSDKType; + signed_last_block: boolean; +} +export interface Evidence { + type: EvidenceType; + /** The offending validator */ + validator?: Validator; + /** The height when the offense occurred */ + height: Long; + /** The corresponding time where the offense occurred */ + time?: Date; + /** + * Total voting power of the validator set in case the ABCI application does + * not store historical validators. + * https://github.com/tendermint/tendermint/issues/4581 + */ + totalVotingPower: Long; +} +export interface EvidenceSDKType { + type: EvidenceType; + validator?: ValidatorSDKType; + height: Long; + time?: Date; + total_voting_power: Long; +} +export interface Snapshot { + /** The height at which the snapshot was taken */ + height: Long; + /** The application-specific snapshot format */ + format: number; + /** Number of chunks in the snapshot */ + chunks: number; + /** Arbitrary snapshot hash, equal only if identical */ + hash: Uint8Array; + /** Arbitrary application metadata */ + metadata: Uint8Array; +} +export interface SnapshotSDKType { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata: Uint8Array; +} +export declare const Request: { + encode(message: Request, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Request; + fromPartial(object: DeepPartial): Request; +}; +export declare const RequestEcho: { + encode(message: RequestEcho, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEcho; + fromPartial(object: DeepPartial): RequestEcho; +}; +export declare const RequestFlush: { + encode(_: RequestFlush, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestFlush; + fromPartial(_: DeepPartial): RequestFlush; +}; +export declare const RequestInfo: { + encode(message: RequestInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInfo; + fromPartial(object: DeepPartial): RequestInfo; +}; +export declare const RequestSetOption: { + encode(message: RequestSetOption, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestSetOption; + fromPartial(object: DeepPartial): RequestSetOption; +}; +export declare const RequestInitChain: { + encode(message: RequestInitChain, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInitChain; + fromPartial(object: DeepPartial): RequestInitChain; +}; +export declare const RequestQuery: { + encode(message: RequestQuery, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): 
RequestQuery; + fromPartial(object: DeepPartial): RequestQuery; +}; +export declare const RequestBeginBlock: { + encode(message: RequestBeginBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestBeginBlock; + fromPartial(object: DeepPartial): RequestBeginBlock; +}; +export declare const RequestCheckTx: { + encode(message: RequestCheckTx, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCheckTx; + fromPartial(object: DeepPartial): RequestCheckTx; +}; +export declare const RequestDeliverTx: { + encode(message: RequestDeliverTx, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestDeliverTx; + fromPartial(object: DeepPartial): RequestDeliverTx; +}; +export declare const RequestEndBlock: { + encode(message: RequestEndBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEndBlock; + fromPartial(object: DeepPartial): RequestEndBlock; +}; +export declare const RequestCommit: { + encode(_: RequestCommit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCommit; + fromPartial(_: DeepPartial): RequestCommit; +}; +export declare const RequestListSnapshots: { + encode(_: RequestListSnapshots, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestListSnapshots; + fromPartial(_: DeepPartial): RequestListSnapshots; +}; +export declare const RequestOfferSnapshot: { + encode(message: RequestOfferSnapshot, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestOfferSnapshot; + fromPartial(object: DeepPartial): RequestOfferSnapshot; +}; +export declare const RequestLoadSnapshotChunk: { + encode(message: RequestLoadSnapshotChunk, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestLoadSnapshotChunk; + fromPartial(object: DeepPartial): RequestLoadSnapshotChunk; +}; +export declare const RequestApplySnapshotChunk: { + encode(message: RequestApplySnapshotChunk, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): RequestApplySnapshotChunk; + fromPartial(object: DeepPartial): RequestApplySnapshotChunk; +}; +export declare const Response: { + encode(message: Response, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Response; + fromPartial(object: DeepPartial): Response; +}; +export declare const ResponseException: { + encode(message: ResponseException, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseException; + fromPartial(object: DeepPartial): ResponseException; +}; +export declare const ResponseEcho: { + encode(message: ResponseEcho, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEcho; + fromPartial(object: DeepPartial): ResponseEcho; +}; +export declare const ResponseFlush: { + encode(_: ResponseFlush, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseFlush; + fromPartial(_: DeepPartial): ResponseFlush; +}; +export declare const ResponseInfo: { + encode(message: ResponseInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInfo; + fromPartial(object: DeepPartial): ResponseInfo; +}; +export declare const ResponseSetOption: { + 
encode(message: ResponseSetOption, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseSetOption; + fromPartial(object: DeepPartial): ResponseSetOption; +}; +export declare const ResponseInitChain: { + encode(message: ResponseInitChain, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInitChain; + fromPartial(object: DeepPartial): ResponseInitChain; +}; +export declare const ResponseQuery: { + encode(message: ResponseQuery, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseQuery; + fromPartial(object: DeepPartial): ResponseQuery; +}; +export declare const ResponseBeginBlock: { + encode(message: ResponseBeginBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseBeginBlock; + fromPartial(object: DeepPartial): ResponseBeginBlock; +}; +export declare const ResponseCheckTx: { + encode(message: ResponseCheckTx, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCheckTx; + fromPartial(object: DeepPartial): ResponseCheckTx; +}; +export declare const ResponseDeliverTx: { + encode(message: ResponseDeliverTx, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseDeliverTx; + fromPartial(object: DeepPartial): ResponseDeliverTx; +}; +export declare const ResponseEndBlock: { + encode(message: ResponseEndBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEndBlock; + fromPartial(object: DeepPartial): ResponseEndBlock; +}; +export declare const ResponseCommit: { + encode(message: ResponseCommit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCommit; + fromPartial(object: DeepPartial): ResponseCommit; +}; +export declare const ResponseListSnapshots: { + encode(message: ResponseListSnapshots, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseListSnapshots; + fromPartial(object: DeepPartial): ResponseListSnapshots; +}; +export declare const ResponseOfferSnapshot: { + encode(message: ResponseOfferSnapshot, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseOfferSnapshot; + fromPartial(object: DeepPartial): ResponseOfferSnapshot; +}; +export declare const ResponseLoadSnapshotChunk: { + encode(message: ResponseLoadSnapshotChunk, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseLoadSnapshotChunk; + fromPartial(object: DeepPartial): ResponseLoadSnapshotChunk; +}; +export declare const ResponseApplySnapshotChunk: { + encode(message: ResponseApplySnapshotChunk, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseApplySnapshotChunk; + fromPartial(object: DeepPartial): ResponseApplySnapshotChunk; +}; +export declare const ConsensusParams: { + encode(message: ConsensusParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams; + fromPartial(object: DeepPartial): ConsensusParams; +}; +export declare const BlockParams: { + encode(message: BlockParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams; + fromPartial(object: DeepPartial): BlockParams; +}; +export declare const LastCommitInfo: { + 
encode(message: LastCommitInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LastCommitInfo; + fromPartial(object: DeepPartial): LastCommitInfo; +}; +export declare const Event: { + encode(message: Event, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Event; + fromPartial(object: DeepPartial): Event; +}; +export declare const EventAttribute: { + encode(message: EventAttribute, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EventAttribute; + fromPartial(object: DeepPartial): EventAttribute; +}; +export declare const TxResult: { + encode(message: TxResult, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxResult; + fromPartial(object: DeepPartial): TxResult; +}; +export declare const Validator: { + encode(message: Validator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Validator; + fromPartial(object: DeepPartial): Validator; +}; +export declare const ValidatorUpdate: { + encode(message: ValidatorUpdate, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorUpdate; + fromPartial(object: DeepPartial): ValidatorUpdate; +}; +export declare const VoteInfo: { + encode(message: VoteInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): VoteInfo; + fromPartial(object: DeepPartial): VoteInfo; +}; +export declare const Evidence: { + encode(message: Evidence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence; + fromPartial(object: DeepPartial): Evidence; +}; +export declare const Snapshot: { + encode(message: Snapshot, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Snapshot; + fromPartial(object: DeepPartial): Snapshot; +}; diff --git a/packages/codegen/dist/tendermint/bundle.d.ts b/packages/codegen/dist/tendermint/bundle.d.ts new file mode 100644 index 00000000..85f6bcdd --- /dev/null +++ b/packages/codegen/dist/tendermint/bundle.d.ts @@ -0,0 +1,2066 @@ +/// +import * as _134 from "./abci/types"; +import * as _135 from "./crypto/keys"; +import * as _136 from "./crypto/proof"; +import * as _137 from "./libs/bits/types"; +import * as _138 from "./p2p/types"; +import * as _139 from "./types/block"; +import * as _140 from "./types/evidence"; +import * as _141 from "./types/params"; +import * as _142 from "./types/types"; +import * as _143 from "./types/validator"; +import * as _144 from "./version/types"; +export declare namespace tendermint { + const abci: { + checkTxTypeFromJSON(object: any): _134.CheckTxType; + checkTxTypeToJSON(object: _134.CheckTxType): string; + responseOfferSnapshot_ResultFromJSON(object: any): _134.ResponseOfferSnapshot_Result; + responseOfferSnapshot_ResultToJSON(object: _134.ResponseOfferSnapshot_Result): string; + responseApplySnapshotChunk_ResultFromJSON(object: any): _134.ResponseApplySnapshotChunk_Result; + responseApplySnapshotChunk_ResultToJSON(object: _134.ResponseApplySnapshotChunk_Result): string; + evidenceTypeFromJSON(object: any): _134.EvidenceType; + evidenceTypeToJSON(object: _134.EvidenceType): string; + CheckTxType: typeof _134.CheckTxType; + CheckTxTypeSDKType: typeof _134.CheckTxType; + ResponseOfferSnapshot_Result: typeof _134.ResponseOfferSnapshot_Result; + ResponseOfferSnapshot_ResultSDKType: typeof _134.ResponseOfferSnapshot_Result; + 
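Another illustrative sketch (not part of the diff) for the ABCI codec objects declared above. Event and EventAttribute carry Uint8Array keys and values, so a TextEncoder is used for readable strings; the import path is an assumption.

import { Event } from "@my-org/codegen/tendermint/abci/types"; // hypothetical path

const encoder = new TextEncoder();

const evt = Event.fromPartial({
  type: "transfer",
  attributes: [
    {
      key: encoder.encode("recipient"),
      value: encoder.encode("cosmos1..."), // placeholder address
      index: true, // marks the attribute for indexing (nondeterministic per the interface comment)
    },
  ],
});

const bytes = Event.encode(evt).finish();
const roundTripped = Event.decode(bytes); // equals evt field-by-field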
ResponseApplySnapshotChunk_Result: typeof _134.ResponseApplySnapshotChunk_Result; + ResponseApplySnapshotChunk_ResultSDKType: typeof _134.ResponseApplySnapshotChunk_Result; + EvidenceType: typeof _134.EvidenceType; + EvidenceTypeSDKType: typeof _134.EvidenceType; + Request: { + encode(message: _134.Request, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Request; + fromPartial(object: { + echo?: { + message?: string; + }; + flush?: {}; + info?: { + version?: string; + blockVersion?: string | number | import("long").Long; + p2pVersion?: string | number | import("long").Long; + }; + setOption?: { + key?: string; + value?: string; + }; + initChain?: { + time?: Date; + chainId?: string; + consensusParams?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + validators?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + appStateBytes?: Uint8Array; + initialHeight?: string | number | import("long").Long; + }; + query?: { + data?: Uint8Array; + path?: string; + height?: string | number | import("long").Long; + prove?: boolean; + }; + beginBlock?: { + hash?: Uint8Array; + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + lastCommitInfo?: { + round?: number; + votes?: { + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + signedLastBlock?: boolean; + }[]; + }; + byzantineValidators?: { + type?: _134.EvidenceType; + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + height?: string | number | import("long").Long; + time?: Date; + totalVotingPower?: string | number | import("long").Long; + }[]; + }; + checkTx?: { + tx?: Uint8Array; + type?: _134.CheckTxType; + }; + deliverTx?: { + tx?: Uint8Array; + }; + endBlock?: { + height?: string | number | import("long").Long; + }; + commit?: {}; + listSnapshots?: {}; + offerSnapshot?: { + snapshot?: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: Uint8Array; + }; + appHash?: Uint8Array; + }; + loadSnapshotChunk?: { + height?: string | number | import("long").Long; + format?: number; + chunk?: number; + }; + applySnapshotChunk?: { + index?: number; + chunk?: Uint8Array; + sender?: string; + }; + }): _134.Request; + }; + RequestEcho: { + encode(message: _134.RequestEcho, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | 
import("protobufjs").Reader, length?: number): _134.RequestEcho; + fromPartial(object: { + message?: string; + }): _134.RequestEcho; + }; + RequestFlush: { + encode(_: _134.RequestFlush, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestFlush; + fromPartial(_: {}): _134.RequestFlush; + }; + RequestInfo: { + encode(message: _134.RequestInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestInfo; + fromPartial(object: { + version?: string; + blockVersion?: string | number | import("long").Long; + p2pVersion?: string | number | import("long").Long; + }): _134.RequestInfo; + }; + RequestSetOption: { + encode(message: _134.RequestSetOption, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestSetOption; + fromPartial(object: { + key?: string; + value?: string; + }): _134.RequestSetOption; + }; + RequestInitChain: { + encode(message: _134.RequestInitChain, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestInitChain; + fromPartial(object: { + time?: Date; + chainId?: string; + consensusParams?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + validators?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + appStateBytes?: Uint8Array; + initialHeight?: string | number | import("long").Long; + }): _134.RequestInitChain; + }; + RequestQuery: { + encode(message: _134.RequestQuery, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestQuery; + fromPartial(object: { + data?: Uint8Array; + path?: string; + height?: string | number | import("long").Long; + prove?: boolean; + }): _134.RequestQuery; + }; + RequestBeginBlock: { + encode(message: _134.RequestBeginBlock, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestBeginBlock; + fromPartial(object: { + hash?: Uint8Array; + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + lastCommitInfo?: { + round?: number; + votes?: { + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + 
}; + signedLastBlock?: boolean; + }[]; + }; + byzantineValidators?: { + type?: _134.EvidenceType; + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + height?: string | number | import("long").Long; + time?: Date; + totalVotingPower?: string | number | import("long").Long; + }[]; + }): _134.RequestBeginBlock; + }; + RequestCheckTx: { + encode(message: _134.RequestCheckTx, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestCheckTx; + fromPartial(object: { + tx?: Uint8Array; + type?: _134.CheckTxType; + }): _134.RequestCheckTx; + }; + RequestDeliverTx: { + encode(message: _134.RequestDeliverTx, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestDeliverTx; + fromPartial(object: { + tx?: Uint8Array; + }): _134.RequestDeliverTx; + }; + RequestEndBlock: { + encode(message: _134.RequestEndBlock, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestEndBlock; + fromPartial(object: { + height?: string | number | import("long").Long; + }): _134.RequestEndBlock; + }; + RequestCommit: { + encode(_: _134.RequestCommit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestCommit; + fromPartial(_: {}): _134.RequestCommit; + }; + RequestListSnapshots: { + encode(_: _134.RequestListSnapshots, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestListSnapshots; + fromPartial(_: {}): _134.RequestListSnapshots; + }; + RequestOfferSnapshot: { + encode(message: _134.RequestOfferSnapshot, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestOfferSnapshot; + fromPartial(object: { + snapshot?: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: Uint8Array; + }; + appHash?: Uint8Array; + }): _134.RequestOfferSnapshot; + }; + RequestLoadSnapshotChunk: { + encode(message: _134.RequestLoadSnapshotChunk, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestLoadSnapshotChunk; + fromPartial(object: { + height?: string | number | import("long").Long; + format?: number; + chunk?: number; + }): _134.RequestLoadSnapshotChunk; + }; + RequestApplySnapshotChunk: { + encode(message: _134.RequestApplySnapshotChunk, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.RequestApplySnapshotChunk; + fromPartial(object: { + index?: number; + chunk?: Uint8Array; + sender?: string; + }): _134.RequestApplySnapshotChunk; + }; + Response: { + encode(message: _134.Response, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Response; + fromPartial(object: { + exception?: { + error?: string; + }; + echo?: { + message?: string; + }; + flush?: {}; + info?: { + data?: string; + version?: string; + appVersion?: string | number | 
import("long").Long; + lastBlockHeight?: string | number | import("long").Long; + lastBlockAppHash?: Uint8Array; + }; + setOption?: { + code?: number; + log?: string; + info?: string; + }; + initChain?: { + consensusParams?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + validators?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + appHash?: Uint8Array; + }; + query?: { + code?: number; + log?: string; + info?: string; + index?: string | number | import("long").Long; + key?: Uint8Array; + value?: Uint8Array; + proofOps?: { + ops?: { + type?: string; + key?: Uint8Array; + data?: Uint8Array; + }[]; + }; + height?: string | number | import("long").Long; + codespace?: string; + }; + beginBlock?: { + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }; + checkTx?: { + code?: number; + data?: Uint8Array; + log?: string; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + codespace?: string; + }; + deliverTx?: { + code?: number; + data?: Uint8Array; + log?: string; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + codespace?: string; + }; + endBlock?: { + validatorUpdates?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + consensusParamUpdates?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }; + commit?: { + data?: Uint8Array; + retainHeight?: string | number | import("long").Long; + }; + listSnapshots?: { + snapshots?: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: Uint8Array; + }[]; + }; + offerSnapshot?: { + result?: _134.ResponseOfferSnapshot_Result; + }; + loadSnapshotChunk?: { + chunk?: Uint8Array; + }; + applySnapshotChunk?: { + result?: _134.ResponseApplySnapshotChunk_Result; + refetchChunks?: number[]; + rejectSenders?: string[]; + }; + }): _134.Response; + }; + ResponseException: { + encode(message: _134.ResponseException, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, 
length?: number): _134.ResponseException; + fromPartial(object: { + error?: string; + }): _134.ResponseException; + }; + ResponseEcho: { + encode(message: _134.ResponseEcho, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseEcho; + fromPartial(object: { + message?: string; + }): _134.ResponseEcho; + }; + ResponseFlush: { + encode(_: _134.ResponseFlush, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseFlush; + fromPartial(_: {}): _134.ResponseFlush; + }; + ResponseInfo: { + encode(message: _134.ResponseInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseInfo; + fromPartial(object: { + data?: string; + version?: string; + appVersion?: string | number | import("long").Long; + lastBlockHeight?: string | number | import("long").Long; + lastBlockAppHash?: Uint8Array; + }): _134.ResponseInfo; + }; + ResponseSetOption: { + encode(message: _134.ResponseSetOption, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseSetOption; + fromPartial(object: { + code?: number; + log?: string; + info?: string; + }): _134.ResponseSetOption; + }; + ResponseInitChain: { + encode(message: _134.ResponseInitChain, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseInitChain; + fromPartial(object: { + consensusParams?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + validators?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + appHash?: Uint8Array; + }): _134.ResponseInitChain; + }; + ResponseQuery: { + encode(message: _134.ResponseQuery, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseQuery; + fromPartial(object: { + code?: number; + log?: string; + info?: string; + index?: string | number | import("long").Long; + key?: Uint8Array; + value?: Uint8Array; + proofOps?: { + ops?: { + type?: string; + key?: Uint8Array; + data?: Uint8Array; + }[]; + }; + height?: string | number | import("long").Long; + codespace?: string; + }): _134.ResponseQuery; + }; + ResponseBeginBlock: { + encode(message: _134.ResponseBeginBlock, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseBeginBlock; + fromPartial(object: { + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }): _134.ResponseBeginBlock; + }; + ResponseCheckTx: { + encode(message: _134.ResponseCheckTx, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + 
decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseCheckTx; + fromPartial(object: { + code?: number; + data?: Uint8Array; + log?: string; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + codespace?: string; + }): _134.ResponseCheckTx; + }; + ResponseDeliverTx: { + encode(message: _134.ResponseDeliverTx, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseDeliverTx; + fromPartial(object: { + code?: number; + data?: Uint8Array; + log?: string; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + codespace?: string; + }): _134.ResponseDeliverTx; + }; + ResponseEndBlock: { + encode(message: _134.ResponseEndBlock, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseEndBlock; + fromPartial(object: { + validatorUpdates?: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }[]; + consensusParamUpdates?: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + }): _134.ResponseEndBlock; + }; + ResponseCommit: { + encode(message: _134.ResponseCommit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseCommit; + fromPartial(object: { + data?: Uint8Array; + retainHeight?: string | number | import("long").Long; + }): _134.ResponseCommit; + }; + ResponseListSnapshots: { + encode(message: _134.ResponseListSnapshots, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseListSnapshots; + fromPartial(object: { + snapshots?: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: Uint8Array; + }[]; + }): _134.ResponseListSnapshots; + }; + ResponseOfferSnapshot: { + encode(message: _134.ResponseOfferSnapshot, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseOfferSnapshot; + fromPartial(object: { + result?: _134.ResponseOfferSnapshot_Result; + }): _134.ResponseOfferSnapshot; + }; + ResponseLoadSnapshotChunk: { + encode(message: _134.ResponseLoadSnapshotChunk, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): 
_134.ResponseLoadSnapshotChunk; + fromPartial(object: { + chunk?: Uint8Array; + }): _134.ResponseLoadSnapshotChunk; + }; + ResponseApplySnapshotChunk: { + encode(message: _134.ResponseApplySnapshotChunk, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ResponseApplySnapshotChunk; + fromPartial(object: { + result?: _134.ResponseApplySnapshotChunk_Result; + refetchChunks?: number[]; + rejectSenders?: string[]; + }): _134.ResponseApplySnapshotChunk; + }; + ConsensusParams: { + encode(message: _134.ConsensusParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ConsensusParams; + fromPartial(object: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }): _134.ConsensusParams; + }; + BlockParams: { + encode(message: _134.BlockParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.BlockParams; + fromPartial(object: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + }): _134.BlockParams; + }; + LastCommitInfo: { + encode(message: _134.LastCommitInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.LastCommitInfo; + fromPartial(object: { + round?: number; + votes?: { + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + signedLastBlock?: boolean; + }[]; + }): _134.LastCommitInfo; + }; + Event: { + encode(message: _134.Event, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Event; + fromPartial(object: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }): _134.Event; + }; + EventAttribute: { + encode(message: _134.EventAttribute, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.EventAttribute; + fromPartial(object: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }): _134.EventAttribute; + }; + TxResult: { + encode(message: _134.TxResult, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.TxResult; + fromPartial(object: { + height?: string | number | import("long").Long; + index?: number; + tx?: Uint8Array; + result?: { + code?: number; + data?: Uint8Array; + log?: string; + info?: string; + gasWanted?: string | number | import("long").Long; + gasUsed?: string | number | import("long").Long; + events?: { + type?: string; + attributes?: { + key?: Uint8Array; + value?: Uint8Array; + index?: boolean; + }[]; + }[]; + codespace?: string; + }; + }): _134.TxResult; + }; + Validator: { + encode(message: _134.Validator, writer?: 
import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Validator; + fromPartial(object: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }): _134.Validator; + }; + ValidatorUpdate: { + encode(message: _134.ValidatorUpdate, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.ValidatorUpdate; + fromPartial(object: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + power?: string | number | import("long").Long; + }): _134.ValidatorUpdate; + }; + VoteInfo: { + encode(message: _134.VoteInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.VoteInfo; + fromPartial(object: { + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + signedLastBlock?: boolean; + }): _134.VoteInfo; + }; + Evidence: { + encode(message: _134.Evidence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Evidence; + fromPartial(object: { + type?: _134.EvidenceType; + validator?: { + address?: Uint8Array; + power?: string | number | import("long").Long; + }; + height?: string | number | import("long").Long; + time?: Date; + totalVotingPower?: string | number | import("long").Long; + }): _134.Evidence; + }; + Snapshot: { + encode(message: _134.Snapshot, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _134.Snapshot; + fromPartial(object: { + height?: string | number | import("long").Long; + format?: number; + chunks?: number; + hash?: Uint8Array; + metadata?: Uint8Array; + }): _134.Snapshot; + }; + }; + const crypto: { + Proof: { + encode(message: _136.Proof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _136.Proof; + fromPartial(object: { + total?: string | number | import("long").Long; + index?: string | number | import("long").Long; + leafHash?: Uint8Array; + aunts?: Uint8Array[]; + }): _136.Proof; + }; + ValueOp: { + encode(message: _136.ValueOp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _136.ValueOp; + fromPartial(object: { + key?: Uint8Array; + proof?: { + total?: string | number | import("long").Long; + index?: string | number | import("long").Long; + leafHash?: Uint8Array; + aunts?: Uint8Array[]; + }; + }): _136.ValueOp; + }; + DominoOp: { + encode(message: _136.DominoOp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _136.DominoOp; + fromPartial(object: { + key?: string; + input?: string; + output?: string; + }): _136.DominoOp; + }; + ProofOp: { + encode(message: _136.ProofOp, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _136.ProofOp; + fromPartial(object: { + type?: string; + key?: Uint8Array; + data?: Uint8Array; + }): _136.ProofOp; + }; + ProofOps: { + encode(message: _136.ProofOps, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: 
Uint8Array | import("protobufjs").Reader, length?: number): _136.ProofOps; + fromPartial(object: { + ops?: { + type?: string; + key?: Uint8Array; + data?: Uint8Array; + }[]; + }): _136.ProofOps; + }; + PublicKey: { + encode(message: _135.PublicKey, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _135.PublicKey; + fromPartial(object: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }): _135.PublicKey; + }; + }; + namespace libs { + const bits: { + BitArray: { + encode(message: _137.BitArray, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _137.BitArray; + fromPartial(object: { + bits?: string | number | import("long").Long; + elems?: (string | number | import("long").Long)[]; + }): _137.BitArray; + }; + }; + } + const p2p: { + ProtocolVersion: { + encode(message: _138.ProtocolVersion, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _138.ProtocolVersion; + fromPartial(object: { + p2p?: string | number | import("long").Long; + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }): _138.ProtocolVersion; + }; + NodeInfo: { + encode(message: _138.NodeInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _138.NodeInfo; + fromPartial(object: { + protocolVersion?: { + p2p?: string | number | import("long").Long; + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + nodeId?: string; + listenAddr?: string; + network?: string; + version?: string; + channels?: Uint8Array; + moniker?: string; + other?: { + txIndex?: string; + rpcAddress?: string; + }; + }): _138.NodeInfo; + }; + NodeInfoOther: { + encode(message: _138.NodeInfoOther, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _138.NodeInfoOther; + fromPartial(object: { + txIndex?: string; + rpcAddress?: string; + }): _138.NodeInfoOther; + }; + PeerInfo: { + encode(message: _138.PeerInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _138.PeerInfo; + fromPartial(object: { + id?: string; + addressInfo?: { + address?: string; + lastDialSuccess?: Date; + lastDialFailure?: Date; + dialFailures?: number; + }[]; + lastConnected?: Date; + }): _138.PeerInfo; + }; + PeerAddressInfo: { + encode(message: _138.PeerAddressInfo, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _138.PeerAddressInfo; + fromPartial(object: { + address?: string; + lastDialSuccess?: Date; + lastDialFailure?: Date; + dialFailures?: number; + }): _138.PeerAddressInfo; + }; + }; + const types: { + ValidatorSet: { + encode(message: _143.ValidatorSet, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _143.ValidatorSet; + fromPartial(object: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | 
import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }): _143.ValidatorSet; + }; + Validator: { + encode(message: _143.Validator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _143.Validator; + fromPartial(object: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }): _143.Validator; + }; + SimpleValidator: { + encode(message: _143.SimpleValidator, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _143.SimpleValidator; + fromPartial(object: { + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + }): _143.SimpleValidator; + }; + blockIDFlagFromJSON(object: any): _142.BlockIDFlag; + blockIDFlagToJSON(object: _142.BlockIDFlag): string; + signedMsgTypeFromJSON(object: any): _142.SignedMsgType; + signedMsgTypeToJSON(object: _142.SignedMsgType): string; + BlockIDFlag: typeof _142.BlockIDFlag; + BlockIDFlagSDKType: typeof _142.BlockIDFlag; + SignedMsgType: typeof _142.SignedMsgType; + SignedMsgTypeSDKType: typeof _142.SignedMsgType; + PartSetHeader: { + encode(message: _142.PartSetHeader, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.PartSetHeader; + fromPartial(object: { + total?: number; + hash?: Uint8Array; + }): _142.PartSetHeader; + }; + Part: { + encode(message: _142.Part, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Part; + fromPartial(object: { + index?: number; + bytes?: Uint8Array; + proof?: { + total?: string | number | import("long").Long; + index?: string | number | import("long").Long; + leafHash?: Uint8Array; + aunts?: Uint8Array[]; + }; + }): _142.Part; + }; + BlockID: { + encode(message: _142.BlockID, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.BlockID; + fromPartial(object: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }): _142.BlockID; + }; + Header: { + encode(message: _142.Header, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Header; + fromPartial(object: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }): _142.Header; + }; + 
Data: { + encode(message: _142.Data, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Data; + fromPartial(object: { + txs?: Uint8Array[]; + }): _142.Data; + }; + Vote: { + encode(message: _142.Vote, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Vote; + fromPartial(object: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }): _142.Vote; + }; + Commit: { + encode(message: _142.Commit, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Commit; + fromPartial(object: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }): _142.Commit; + }; + CommitSig: { + encode(message: _142.CommitSig, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.CommitSig; + fromPartial(object: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }): _142.CommitSig; + }; + Proposal: { + encode(message: _142.Proposal, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.Proposal; + fromPartial(object: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + polRound?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + signature?: Uint8Array; + }): _142.Proposal; + }; + SignedHeader: { + encode(message: _142.SignedHeader, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.SignedHeader; + fromPartial(object: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }): _142.SignedHeader; + }; + LightBlock: { + encode(message: _142.LightBlock, writer?: import("protobufjs").Writer): 
import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.LightBlock; + fromPartial(object: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }): _142.LightBlock; + }; + BlockMeta: { + encode(message: _142.BlockMeta, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.BlockMeta; + fromPartial(object: { + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + blockSize?: string | number | import("long").Long; + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + numTxs?: string | number | import("long").Long; + }): _142.BlockMeta; + }; + TxProof: { + encode(message: _142.TxProof, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _142.TxProof; + fromPartial(object: { + rootHash?: Uint8Array; + data?: Uint8Array; + proof?: { + total?: string | number | import("long").Long; + index?: string | number | import("long").Long; + leafHash?: Uint8Array; + aunts?: Uint8Array[]; + }; + }): _142.TxProof; + }; + ConsensusParams: { + encode(message: _141.ConsensusParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.ConsensusParams; + fromPartial(object: { + block?: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + 
timeIotaMs?: string | number | import("long").Long; + }; + evidence?: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }; + validator?: { + pubKeyTypes?: string[]; + }; + version?: { + appVersion?: string | number | import("long").Long; + }; + }): _141.ConsensusParams; + }; + BlockParams: { + encode(message: _141.BlockParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.BlockParams; + fromPartial(object: { + maxBytes?: string | number | import("long").Long; + maxGas?: string | number | import("long").Long; + timeIotaMs?: string | number | import("long").Long; + }): _141.BlockParams; + }; + EvidenceParams: { + encode(message: _141.EvidenceParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.EvidenceParams; + fromPartial(object: { + maxAgeNumBlocks?: string | number | import("long").Long; + maxAgeDuration?: { + seconds?: string | number | import("long").Long; + nanos?: number; + }; + maxBytes?: string | number | import("long").Long; + }): _141.EvidenceParams; + }; + ValidatorParams: { + encode(message: _141.ValidatorParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.ValidatorParams; + fromPartial(object: { + pubKeyTypes?: string[]; + }): _141.ValidatorParams; + }; + VersionParams: { + encode(message: _141.VersionParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.VersionParams; + fromPartial(object: { + appVersion?: string | number | import("long").Long; + }): _141.VersionParams; + }; + HashedParams: { + encode(message: _141.HashedParams, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _141.HashedParams; + fromPartial(object: { + blockMaxBytes?: string | number | import("long").Long; + blockMaxGas?: string | number | import("long").Long; + }): _141.HashedParams; + }; + Evidence: { + encode(message: _140.Evidence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _140.Evidence; + fromPartial(object: { + duplicateVoteEvidence?: { + voteA?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | 
import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }): _140.Evidence; + }; + DuplicateVoteEvidence: { + encode(message: _140.DuplicateVoteEvidence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _140.DuplicateVoteEvidence; + fromPartial(object: { + voteA?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }): _140.DuplicateVoteEvidence; + }; + LightClientAttackEvidence: { + encode(message: _140.LightClientAttackEvidence, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _140.LightClientAttackEvidence; + fromPartial(object: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; 
+ }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }): _140.LightClientAttackEvidence; + }; + EvidenceList: { + encode(message: _140.EvidenceList, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _140.EvidenceList; + fromPartial(object: { + evidence?: { + duplicateVoteEvidence?: { + voteA?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: 
_142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }[]; + }): _140.EvidenceList; + }; + Block: { + encode(message: _139.Block, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _139.Block; + fromPartial(object: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + data?: { + txs?: Uint8Array[]; + }; + evidence?: { + evidence?: { + duplicateVoteEvidence?: { + voteA?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + voteB?: { + type?: _142.SignedMsgType; + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + timestamp?: Date; + validatorAddress?: Uint8Array; + validatorIndex?: number; + signature?: Uint8Array; + }; + totalVotingPower?: string | number | import("long").Long; + validatorPower?: string | number | import("long").Long; + timestamp?: Date; + }; + lightClientAttackEvidence?: { + conflictingBlock?: { + signedHeader?: { + header?: { + version?: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }; + chainId?: string; + height?: string | number | import("long").Long; + time?: Date; + lastBlockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + lastCommitHash?: Uint8Array; + dataHash?: Uint8Array; + validatorsHash?: Uint8Array; + nextValidatorsHash?: Uint8Array; + consensusHash?: Uint8Array; + appHash?: Uint8Array; + lastResultsHash?: Uint8Array; + evidenceHash?: Uint8Array; + proposerAddress?: Uint8Array; + }; + commit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: 
Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }; + validatorSet?: { + validators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + proposer?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }; + totalVotingPower?: string | number | import("long").Long; + }; + }; + commonHeight?: string | number | import("long").Long; + byzantineValidators?: { + address?: Uint8Array; + pubKey?: { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; + }; + votingPower?: string | number | import("long").Long; + proposerPriority?: string | number | import("long").Long; + }[]; + totalVotingPower?: string | number | import("long").Long; + timestamp?: Date; + }; + }[]; + }; + lastCommit?: { + height?: string | number | import("long").Long; + round?: number; + blockId?: { + hash?: Uint8Array; + partSetHeader?: { + total?: number; + hash?: Uint8Array; + }; + }; + signatures?: { + blockIdFlag?: _142.BlockIDFlag; + validatorAddress?: Uint8Array; + timestamp?: Date; + signature?: Uint8Array; + }[]; + }; + }): _139.Block; + }; + }; + const version: { + App: { + encode(message: _144.App, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _144.App; + fromPartial(object: { + protocol?: string | number | import("long").Long; + software?: string; + }): _144.App; + }; + Consensus: { + encode(message: _144.Consensus, writer?: import("protobufjs").Writer): import("protobufjs").Writer; + decode(input: Uint8Array | import("protobufjs").Reader, length?: number): _144.Consensus; + fromPartial(object: { + block?: string | number | import("long").Long; + app?: string | number | import("long").Long; + }): _144.Consensus; + }; + }; +} diff --git a/packages/codegen/dist/tendermint/crypto/keys.d.ts b/packages/codegen/dist/tendermint/crypto/keys.d.ts new file mode 100644 index 00000000..00a0cb4f --- /dev/null +++ b/packages/codegen/dist/tendermint/crypto/keys.d.ts @@ -0,0 +1,17 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** PublicKey defines the keys available for use with Tendermint Validators */ +export interface PublicKey { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; +} +/** PublicKey defines the keys available for use with Tendermint Validators */ +export interface PublicKeySDKType { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; +} +export declare const PublicKey: { + encode(message: PublicKey, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PublicKey; + fromPartial(object: DeepPartial): PublicKey; +}; diff --git a/packages/codegen/dist/tendermint/crypto/proof.d.ts b/packages/codegen/dist/tendermint/crypto/proof.d.ts new file mode 100644 index 00000000..44de5a80 --- /dev/null +++ b/packages/codegen/dist/tendermint/crypto/proof.d.ts @@ -0,0 +1,88 @@ +/// +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface Proof { + total: Long; + index: Long; + leafHash: Uint8Array; + aunts: 
Uint8Array[]; +} +export interface ProofSDKType { + total: Long; + index: Long; + leaf_hash: Uint8Array; + aunts: Uint8Array[]; +} +export interface ValueOp { + /** Encoded in ProofOp.Key. */ + key: Uint8Array; + /** To encode in ProofOp.Data */ + proof?: Proof; +} +export interface ValueOpSDKType { + key: Uint8Array; + proof?: ProofSDKType; +} +export interface DominoOp { + key: string; + input: string; + output: string; +} +export interface DominoOpSDKType { + key: string; + input: string; + output: string; +} +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ +export interface ProofOp { + type: string; + key: Uint8Array; + data: Uint8Array; +} +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ +export interface ProofOpSDKType { + type: string; + key: Uint8Array; + data: Uint8Array; +} +/** ProofOps is Merkle proof defined by the list of ProofOps */ +export interface ProofOps { + ops: ProofOp[]; +} +/** ProofOps is Merkle proof defined by the list of ProofOps */ +export interface ProofOpsSDKType { + ops: ProofOpSDKType[]; +} +export declare const Proof: { + encode(message: Proof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Proof; + fromPartial(object: DeepPartial): Proof; +}; +export declare const ValueOp: { + encode(message: ValueOp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValueOp; + fromPartial(object: DeepPartial): ValueOp; +}; +export declare const DominoOp: { + encode(message: DominoOp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DominoOp; + fromPartial(object: DeepPartial): DominoOp; +}; +export declare const ProofOp: { + encode(message: ProofOp, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOp; + fromPartial(object: DeepPartial): ProofOp; +}; +export declare const ProofOps: { + encode(message: ProofOps, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOps; + fromPartial(object: DeepPartial): ProofOps; +}; diff --git a/packages/codegen/dist/tendermint/libs/bits/types.d.ts b/packages/codegen/dist/tendermint/libs/bits/types.d.ts new file mode 100644 index 00000000..0c0b2c4c --- /dev/null +++ b/packages/codegen/dist/tendermint/libs/bits/types.d.ts @@ -0,0 +1,16 @@ +/// +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface BitArray { + bits: Long; + elems: Long[]; +} +export interface BitArraySDKType { + bits: Long; + elems: Long[]; +} +export declare const BitArray: { + encode(message: BitArray, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BitArray; + fromPartial(object: DeepPartial): BitArray; +}; diff --git a/packages/codegen/dist/tendermint/p2p/types.d.ts b/packages/codegen/dist/tendermint/p2p/types.d.ts new file mode 100644 index 00000000..542f5dc6 --- /dev/null +++ b/packages/codegen/dist/tendermint/p2p/types.d.ts @@ -0,0 +1,88 @@ +/// +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface ProtocolVersion { + p2p: Long; + block: Long; + app: Long; +} +export interface ProtocolVersionSDKType { + p2p: Long; + 
block: Long; + app: Long; +} +export interface NodeInfo { + protocolVersion?: ProtocolVersion; + nodeId: string; + listenAddr: string; + network: string; + version: string; + channels: Uint8Array; + moniker: string; + other?: NodeInfoOther; +} +export interface NodeInfoSDKType { + protocol_version?: ProtocolVersionSDKType; + node_id: string; + listen_addr: string; + network: string; + version: string; + channels: Uint8Array; + moniker: string; + other?: NodeInfoOtherSDKType; +} +export interface NodeInfoOther { + txIndex: string; + rpcAddress: string; +} +export interface NodeInfoOtherSDKType { + tx_index: string; + rpc_address: string; +} +export interface PeerInfo { + id: string; + addressInfo: PeerAddressInfo[]; + lastConnected?: Date; +} +export interface PeerInfoSDKType { + id: string; + address_info: PeerAddressInfoSDKType[]; + last_connected?: Date; +} +export interface PeerAddressInfo { + address: string; + lastDialSuccess?: Date; + lastDialFailure?: Date; + dialFailures: number; +} +export interface PeerAddressInfoSDKType { + address: string; + last_dial_success?: Date; + last_dial_failure?: Date; + dial_failures: number; +} +export declare const ProtocolVersion: { + encode(message: ProtocolVersion, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ProtocolVersion; + fromPartial(object: DeepPartial): ProtocolVersion; +}; +export declare const NodeInfo: { + encode(message: NodeInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NodeInfo; + fromPartial(object: DeepPartial): NodeInfo; +}; +export declare const NodeInfoOther: { + encode(message: NodeInfoOther, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): NodeInfoOther; + fromPartial(object: DeepPartial): NodeInfoOther; +}; +export declare const PeerInfo: { + encode(message: PeerInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PeerInfo; + fromPartial(object: DeepPartial): PeerInfo; +}; +export declare const PeerAddressInfo: { + encode(message: PeerAddressInfo, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PeerAddressInfo; + fromPartial(object: DeepPartial): PeerAddressInfo; +}; diff --git a/packages/codegen/dist/tendermint/types/block.d.ts b/packages/codegen/dist/tendermint/types/block.d.ts new file mode 100644 index 00000000..68402646 --- /dev/null +++ b/packages/codegen/dist/tendermint/types/block.d.ts @@ -0,0 +1,21 @@ +import { Header, HeaderSDKType, Data, DataSDKType, Commit, CommitSDKType } from "./types"; +import { EvidenceList, EvidenceListSDKType } from "./evidence"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +export interface Block { + header?: Header; + data?: Data; + evidence?: EvidenceList; + lastCommit?: Commit; +} +export interface BlockSDKType { + header?: HeaderSDKType; + data?: DataSDKType; + evidence?: EvidenceListSDKType; + last_commit?: CommitSDKType; +} +export declare const Block: { + encode(message: Block, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Block; + fromPartial(object: DeepPartial): Block; +}; diff --git a/packages/codegen/dist/tendermint/types/evidence.d.ts b/packages/codegen/dist/tendermint/types/evidence.d.ts new file mode 100644 index 00000000..461bbc59 --- /dev/null +++ b/packages/codegen/dist/tendermint/types/evidence.d.ts @@ -0,0 +1,71 @@ +/// +import { Vote, VoteSDKType, 
LightBlock, LightBlockSDKType } from "./types"; +import { Validator, ValidatorSDKType } from "./validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +export interface Evidence { + duplicateVoteEvidence?: DuplicateVoteEvidence; + lightClientAttackEvidence?: LightClientAttackEvidence; +} +export interface EvidenceSDKType { + duplicate_vote_evidence?: DuplicateVoteEvidenceSDKType; + light_client_attack_evidence?: LightClientAttackEvidenceSDKType; +} +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ +export interface DuplicateVoteEvidence { + voteA?: Vote; + voteB?: Vote; + totalVotingPower: Long; + validatorPower: Long; + timestamp?: Date; +} +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ +export interface DuplicateVoteEvidenceSDKType { + vote_a?: VoteSDKType; + vote_b?: VoteSDKType; + total_voting_power: Long; + validator_power: Long; + timestamp?: Date; +} +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. */ +export interface LightClientAttackEvidence { + conflictingBlock?: LightBlock; + commonHeight: Long; + byzantineValidators: Validator[]; + totalVotingPower: Long; + timestamp?: Date; +} +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. */ +export interface LightClientAttackEvidenceSDKType { + conflicting_block?: LightBlockSDKType; + common_height: Long; + byzantine_validators: ValidatorSDKType[]; + total_voting_power: Long; + timestamp?: Date; +} +export interface EvidenceList { + evidence: Evidence[]; +} +export interface EvidenceListSDKType { + evidence: EvidenceSDKType[]; +} +export declare const Evidence: { + encode(message: Evidence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence; + fromPartial(object: DeepPartial): Evidence; +}; +export declare const DuplicateVoteEvidence: { + encode(message: DuplicateVoteEvidence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): DuplicateVoteEvidence; + fromPartial(object: DeepPartial): DuplicateVoteEvidence; +}; +export declare const LightClientAttackEvidence: { + encode(message: LightClientAttackEvidence, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LightClientAttackEvidence; + fromPartial(object: DeepPartial): LightClientAttackEvidence; +}; +export declare const EvidenceList: { + encode(message: EvidenceList, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceList; + fromPartial(object: DeepPartial): EvidenceList; +}; diff --git a/packages/codegen/dist/tendermint/types/params.d.ts b/packages/codegen/dist/tendermint/types/params.d.ts new file mode 100644 index 00000000..a6608d49 --- /dev/null +++ b/packages/codegen/dist/tendermint/types/params.d.ts @@ -0,0 +1,150 @@ +/// +import { Duration, DurationSDKType } from "../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. 
+ */ +export interface ConsensusParams { + block?: BlockParams; + evidence?: EvidenceParams; + validator?: ValidatorParams; + version?: VersionParams; +} +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. + */ +export interface ConsensusParamsSDKType { + block?: BlockParamsSDKType; + evidence?: EvidenceParamsSDKType; + validator?: ValidatorParamsSDKType; + version?: VersionParamsSDKType; +} +/** BlockParams contains limits on the block size. */ +export interface BlockParams { + /** + * Max block size, in bytes. + * Note: must be greater than 0 + */ + maxBytes: Long; + /** + * Max gas per block. + * Note: must be greater or equal to -1 + */ + maxGas: Long; + /** + * Minimum time increment between consecutive blocks (in milliseconds) If the + * block header timestamp is ahead of the system clock, decrease this value. + * + * Not exposed to the application. + */ + timeIotaMs: Long; +} +/** BlockParams contains limits on the block size. */ +export interface BlockParamsSDKType { + max_bytes: Long; + max_gas: Long; + time_iota_ms: Long; +} +/** EvidenceParams determine how we handle evidence of malfeasance. */ +export interface EvidenceParams { + /** + * Max age of evidence, in blocks. + * + * The basic formula for calculating this is: MaxAgeDuration / {average block + * time}. + */ + maxAgeNumBlocks: Long; + /** + * Max age of evidence, in time. + * + * It should correspond with an app's "unbonding period" or other similar + * mechanism for handling [Nothing-At-Stake + * attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + */ + maxAgeDuration?: Duration; + /** + * This sets the maximum size of total evidence in bytes that can be committed in a single block. + * and should fall comfortably under the max block bytes. + * Default is 1048576 or 1MB + */ + maxBytes: Long; +} +/** EvidenceParams determine how we handle evidence of malfeasance. */ +export interface EvidenceParamsSDKType { + max_age_num_blocks: Long; + max_age_duration?: DurationSDKType; + max_bytes: Long; +} +/** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ +export interface ValidatorParams { + pubKeyTypes: string[]; +} +/** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ +export interface ValidatorParamsSDKType { + pub_key_types: string[]; +} +/** VersionParams contains the ABCI application version. */ +export interface VersionParams { + appVersion: Long; +} +/** VersionParams contains the ABCI application version. */ +export interface VersionParamsSDKType { + app_version: Long; +} +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. + */ +export interface HashedParams { + blockMaxBytes: Long; + blockMaxGas: Long; +} +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. 
+ */ +export interface HashedParamsSDKType { + block_max_bytes: Long; + block_max_gas: Long; +} +export declare const ConsensusParams: { + encode(message: ConsensusParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams; + fromPartial(object: DeepPartial): ConsensusParams; +}; +export declare const BlockParams: { + encode(message: BlockParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams; + fromPartial(object: DeepPartial): BlockParams; +}; +export declare const EvidenceParams: { + encode(message: EvidenceParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceParams; + fromPartial(object: DeepPartial): EvidenceParams; +}; +export declare const ValidatorParams: { + encode(message: ValidatorParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorParams; + fromPartial(object: DeepPartial): ValidatorParams; +}; +export declare const VersionParams: { + encode(message: VersionParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): VersionParams; + fromPartial(object: DeepPartial): VersionParams; +}; +export declare const HashedParams: { + encode(message: HashedParams, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): HashedParams; + fromPartial(object: DeepPartial): HashedParams; +}; diff --git a/packages/codegen/dist/tendermint/types/types.d.ts b/packages/codegen/dist/tendermint/types/types.d.ts new file mode 100644 index 00000000..e55d4cd4 --- /dev/null +++ b/packages/codegen/dist/tendermint/types/types.d.ts @@ -0,0 +1,296 @@ +/// +import { Proof, ProofSDKType } from "../crypto/proof"; +import { Consensus, ConsensusSDKType } from "../version/types"; +import { ValidatorSet, ValidatorSetSDKType } from "./validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** BlockIdFlag indicates which BlcokID the signature is for */ +export declare enum BlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = 0, + BLOCK_ID_FLAG_ABSENT = 1, + BLOCK_ID_FLAG_COMMIT = 2, + BLOCK_ID_FLAG_NIL = 3, + UNRECOGNIZED = -1 +} +export declare const BlockIDFlagSDKType: typeof BlockIDFlag; +export declare function blockIDFlagFromJSON(object: any): BlockIDFlag; +export declare function blockIDFlagToJSON(object: BlockIDFlag): string; +/** SignedMsgType is a type of signed message in the consensus. 
*/ +export declare enum SignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = 0, + /** SIGNED_MSG_TYPE_PREVOTE - Votes */ + SIGNED_MSG_TYPE_PREVOTE = 1, + SIGNED_MSG_TYPE_PRECOMMIT = 2, + /** SIGNED_MSG_TYPE_PROPOSAL - Proposals */ + SIGNED_MSG_TYPE_PROPOSAL = 32, + UNRECOGNIZED = -1 +} +export declare const SignedMsgTypeSDKType: typeof SignedMsgType; +export declare function signedMsgTypeFromJSON(object: any): SignedMsgType; +export declare function signedMsgTypeToJSON(object: SignedMsgType): string; +/** PartsetHeader */ +export interface PartSetHeader { + total: number; + hash: Uint8Array; +} +/** PartsetHeader */ +export interface PartSetHeaderSDKType { + total: number; + hash: Uint8Array; +} +export interface Part { + index: number; + bytes: Uint8Array; + proof?: Proof; +} +export interface PartSDKType { + index: number; + bytes: Uint8Array; + proof?: ProofSDKType; +} +/** BlockID */ +export interface BlockID { + hash: Uint8Array; + partSetHeader?: PartSetHeader; +} +/** BlockID */ +export interface BlockIDSDKType { + hash: Uint8Array; + part_set_header?: PartSetHeaderSDKType; +} +/** Header defines the structure of a Tendermint block header. */ +export interface Header { + /** basic block info */ + version?: Consensus; + chainId: string; + height: Long; + time?: Date; + /** prev block info */ + lastBlockId?: BlockID; + /** hashes of block data */ + lastCommitHash: Uint8Array; + dataHash: Uint8Array; + /** hashes from the app output from the prev block */ + validatorsHash: Uint8Array; + /** validators for the next block */ + nextValidatorsHash: Uint8Array; + /** consensus params for current block */ + consensusHash: Uint8Array; + /** state after txs from the previous block */ + appHash: Uint8Array; + lastResultsHash: Uint8Array; + /** consensus info */ + evidenceHash: Uint8Array; + /** original proposer of the block */ + proposerAddress: Uint8Array; +} +/** Header defines the structure of a Tendermint block header. */ +export interface HeaderSDKType { + version?: ConsensusSDKType; + chain_id: string; + height: Long; + time?: Date; + last_block_id?: BlockIDSDKType; + last_commit_hash: Uint8Array; + data_hash: Uint8Array; + validators_hash: Uint8Array; + next_validators_hash: Uint8Array; + consensus_hash: Uint8Array; + app_hash: Uint8Array; + last_results_hash: Uint8Array; + evidence_hash: Uint8Array; + proposer_address: Uint8Array; +} +/** Data contains the set of transactions included in the block */ +export interface Data { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. + * This means that block.AppHash does not include these txs. + */ + txs: Uint8Array[]; +} +/** Data contains the set of transactions included in the block */ +export interface DataSDKType { + txs: Uint8Array[]; +} +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ +export interface Vote { + type: SignedMsgType; + height: Long; + round: number; + /** zero if vote is nil. */ + blockId?: BlockID; + timestamp?: Date; + validatorAddress: Uint8Array; + validatorIndex: number; + signature: Uint8Array; +} +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. 
+ */ +export interface VoteSDKType { + type: SignedMsgType; + height: Long; + round: number; + block_id?: BlockIDSDKType; + timestamp?: Date; + validator_address: Uint8Array; + validator_index: number; + signature: Uint8Array; +} +/** Commit contains the evidence that a block was committed by a set of validators. */ +export interface Commit { + height: Long; + round: number; + blockId?: BlockID; + signatures: CommitSig[]; +} +/** Commit contains the evidence that a block was committed by a set of validators. */ +export interface CommitSDKType { + height: Long; + round: number; + block_id?: BlockIDSDKType; + signatures: CommitSigSDKType[]; +} +/** CommitSig is a part of the Vote included in a Commit. */ +export interface CommitSig { + blockIdFlag: BlockIDFlag; + validatorAddress: Uint8Array; + timestamp?: Date; + signature: Uint8Array; +} +/** CommitSig is a part of the Vote included in a Commit. */ +export interface CommitSigSDKType { + block_id_flag: BlockIDFlag; + validator_address: Uint8Array; + timestamp?: Date; + signature: Uint8Array; +} +export interface Proposal { + type: SignedMsgType; + height: Long; + round: number; + polRound: number; + blockId?: BlockID; + timestamp?: Date; + signature: Uint8Array; +} +export interface ProposalSDKType { + type: SignedMsgType; + height: Long; + round: number; + pol_round: number; + block_id?: BlockIDSDKType; + timestamp?: Date; + signature: Uint8Array; +} +export interface SignedHeader { + header?: Header; + commit?: Commit; +} +export interface SignedHeaderSDKType { + header?: HeaderSDKType; + commit?: CommitSDKType; +} +export interface LightBlock { + signedHeader?: SignedHeader; + validatorSet?: ValidatorSet; +} +export interface LightBlockSDKType { + signed_header?: SignedHeaderSDKType; + validator_set?: ValidatorSetSDKType; +} +export interface BlockMeta { + blockId?: BlockID; + blockSize: Long; + header?: Header; + numTxs: Long; +} +export interface BlockMetaSDKType { + block_id?: BlockIDSDKType; + block_size: Long; + header?: HeaderSDKType; + num_txs: Long; +} +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. */ +export interface TxProof { + rootHash: Uint8Array; + data: Uint8Array; + proof?: Proof; +} +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. */ +export interface TxProofSDKType { + root_hash: Uint8Array; + data: Uint8Array; + proof?: ProofSDKType; +} +export declare const PartSetHeader: { + encode(message: PartSetHeader, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): PartSetHeader; + fromPartial(object: DeepPartial): PartSetHeader; +}; +export declare const Part: { + encode(message: Part, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Part; + fromPartial(object: DeepPartial): Part; +}; +export declare const BlockID: { + encode(message: BlockID, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BlockID; + fromPartial(object: DeepPartial): BlockID; +}; +export declare const Header: { + encode(message: Header, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Header; + fromPartial(object: DeepPartial
): Header; +}; +export declare const Data: { + encode(message: Data, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Data; + fromPartial(object: DeepPartial): Data; +}; +export declare const Vote: { + encode(message: Vote, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Vote; + fromPartial(object: DeepPartial): Vote; +}; +export declare const Commit: { + encode(message: Commit, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Commit; + fromPartial(object: DeepPartial): Commit; +}; +export declare const CommitSig: { + encode(message: CommitSig, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): CommitSig; + fromPartial(object: DeepPartial): CommitSig; +}; +export declare const Proposal: { + encode(message: Proposal, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal; + fromPartial(object: DeepPartial): Proposal; +}; +export declare const SignedHeader: { + encode(message: SignedHeader, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SignedHeader; + fromPartial(object: DeepPartial): SignedHeader; +}; +export declare const LightBlock: { + encode(message: LightBlock, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): LightBlock; + fromPartial(object: DeepPartial): LightBlock; +}; +export declare const BlockMeta: { + encode(message: BlockMeta, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): BlockMeta; + fromPartial(object: DeepPartial): BlockMeta; +}; +export declare const TxProof: { + encode(message: TxProof, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): TxProof; + fromPartial(object: DeepPartial): TxProof; +}; diff --git a/packages/codegen/dist/tendermint/types/validator.d.ts b/packages/codegen/dist/tendermint/types/validator.d.ts new file mode 100644 index 00000000..5a0f01e5 --- /dev/null +++ b/packages/codegen/dist/tendermint/types/validator.d.ts @@ -0,0 +1,49 @@ +/// +import { PublicKey, PublicKeySDKType } from "../crypto/keys"; +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface ValidatorSet { + validators: Validator[]; + proposer?: Validator; + totalVotingPower: Long; +} +export interface ValidatorSetSDKType { + validators: ValidatorSDKType[]; + proposer?: ValidatorSDKType; + total_voting_power: Long; +} +export interface Validator { + address: Uint8Array; + pubKey?: PublicKey; + votingPower: Long; + proposerPriority: Long; +} +export interface ValidatorSDKType { + address: Uint8Array; + pub_key?: PublicKeySDKType; + voting_power: Long; + proposer_priority: Long; +} +export interface SimpleValidator { + pubKey?: PublicKey; + votingPower: Long; +} +export interface SimpleValidatorSDKType { + pub_key?: PublicKeySDKType; + voting_power: Long; +} +export declare const ValidatorSet: { + encode(message: ValidatorSet, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSet; + fromPartial(object: DeepPartial): ValidatorSet; +}; +export declare const Validator: { + encode(message: Validator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Validator; + fromPartial(object: DeepPartial): Validator; +}; +export declare const SimpleValidator: { + 
encode(message: SimpleValidator, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): SimpleValidator; + fromPartial(object: DeepPartial): SimpleValidator; +}; diff --git a/packages/codegen/dist/tendermint/version/types.d.ts b/packages/codegen/dist/tendermint/version/types.d.ts new file mode 100644 index 00000000..c1896705 --- /dev/null +++ b/packages/codegen/dist/tendermint/version/types.d.ts @@ -0,0 +1,49 @@ +/// +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ +export interface App { + protocol: Long; + software: string; +} +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ +export interface AppSDKType { + protocol: Long; + software: string; +} +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ +export interface Consensus { + block: Long; + app: Long; +} +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ +export interface ConsensusSDKType { + block: Long; + app: Long; +} +export declare const App: { + encode(message: App, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): App; + fromPartial(object: DeepPartial): App; +}; +export declare const Consensus: { + encode(message: Consensus, writer?: _m0.Writer): _m0.Writer; + decode(input: _m0.Reader | Uint8Array, length?: number): Consensus; + fromPartial(object: DeepPartial): Consensus; +}; diff --git a/packages/codegen/package.json b/packages/codegen/package.json new file mode 100644 index 00000000..b93d43d1 --- /dev/null +++ b/packages/codegen/package.json @@ -0,0 +1,24 @@ +{ + "name": "@ping-pub/codegen", + "version": "1.0.0", + "description": "Libs genereted from protobuf by Telescope.", + "main": "dist/index.js", + "module": "src/index.ts", + "types": "dist/index.d.ts", + "repository": "https://github.com/ping-pub/explorer.git", + "author": "Ping Liang", + "license": "MIT", + "private": false, + "devDependencies": { + "@osmonauts/telescope": "^0.88.2", + "@protobufs/cosmos": "^0.1.0", + "@protobufs/cosmos_proto": "^0.0.10", + "@protobufs/cosmwasm": "^0.1.1", + "@protobufs/google": "^0.0.10", + "@protobufs/ibc": "^0.1.0" + }, + "scripts": { + "build": "tsc --module es2022 --project ./tsconfig.json ", + "lint": "eslint . --ext .vue,.js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix --ignore-path .gitignore" + } +} \ No newline at end of file diff --git a/packages/codegen/proto/amino/LICENSE b/packages/codegen/proto/amino/LICENSE new file mode 100644 index 00000000..063e03fc --- /dev/null +++ b/packages/codegen/proto/amino/LICENSE @@ -0,0 +1,204 @@ +Cosmos SDK +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 All in Bits, Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
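Every message in the generated dist/ declarations above exposes the same minimal codec surface: encode into a protobufjs Writer, decode from bytes, and fromPartial for building a message from a plain object. A short usage sketch, assuming the compiled dist/ output behaves as these declarations describe and that deep imports into @ping-pub/codegen/dist resolve in the consuming project (both are assumptions, not something this diff asserts):

import Long from "long";
// Hypothetical deep-import path, inferred from the declaration files above.
import { BlockParams } from "@ping-pub/codegen/dist/tendermint/types/params";

// fromPartial fills in defaults for any omitted fields (timeIotaMs here).
const params = BlockParams.fromPartial({
  maxBytes: Long.fromNumber(1048576),
  maxGas: Long.fromNumber(-1),
});

// encode returns a protobufjs Writer; finish() yields the wire bytes.
const bytes: Uint8Array = BlockParams.encode(params).finish();

// decode reverses the round trip.
const decoded = BlockParams.decode(bytes);
console.log(decoded.maxBytes.toString(), decoded.maxGas.toString());

The same three-method shape applies to every generated type in this package, so the sketch transfers directly to Block, Header, Vote, and the rest.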
\ No newline at end of file diff --git a/packages/codegen/proto/amino/README.md b/packages/codegen/proto/amino/README.md new file mode 100644 index 00000000..b0d34bbf --- /dev/null +++ b/packages/codegen/proto/amino/README.md @@ -0,0 +1 @@ +# amino \ No newline at end of file diff --git a/packages/codegen/proto/amino/amino.proto b/packages/codegen/proto/amino/amino.proto new file mode 100644 index 00000000..d01f1752 --- /dev/null +++ b/packages/codegen/proto/amino/amino.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; + +package amino; + +import "google/protobuf/descriptor.proto"; + +// TODO(fdymylja): once we fully migrate to protov2 the go_package needs to be updated. +// We need this right now because gogoproto codegen needs to import the extension. +option go_package = "github.com/cosmos/cosmos-sdk/types/tx/amino"; + +extend google.protobuf.MessageOptions { + // name is the string used when registering a concrete + // type into the Amino type registry, via the Amino codec's + // `RegisterConcrete()` method. This string MUST be at most 39 + // characters long, or else the message will be rejected by the + // Ledger hardware device. + string name = 11110001; + + // encoding describes the encoding format used by Amino for the given + // message. The field type is chosen to be a string for + // flexibility, but it should ideally be short and expected to be + // machine-readable, for example "base64" or "utf8_json". We + // highly recommend to use underscores for word separation instead of spaces. + // + // If left empty, then the Amino encoding is expected to be the same as the + // Protobuf one. + // + // This annotation should not be confused with the `encoding` + // one which operates on the field level. + string message_encoding = 11110002; +} + +extend google.protobuf.FieldOptions { + // encoding describes the encoding format used by Amino for + // the given field. The field type is chosen to be a string for + // flexibility, but it should ideally be short and expected to be + // machine-readable, for example "base64" or "utf8_json". We + // highly recommend to use underscores for word separation instead of spaces. + // + // If left empty, then the Amino encoding is expected to be the same as the + // Protobuf one. + // + // This annotation should not be confused with the + // `message_encoding` one which operates on the message level. + string encoding = 11110003; + + // field_name sets a different field name (i.e. key name) in + // the amino JSON object for the given field. + // + // Example: + // + // message Foo { + // string bar = 1 [(amino.field_name) = "baz"]; + // } + // + // Then the Amino encoding of Foo will be: + // `{"baz":"some value"}` + string field_name = 11110004; + + // dont_omitempty sets the field in the JSON object even if + // its value is empty, i.e. equal to the Golang zero value. To learn what + // the zero values are, see https://go.dev/ref/spec#The_zero_value. + // + // Fields default to `omitempty`, which is the default behavior when this + // annotation is unset. When set to true, then the field value in the + // JSON object will be set, i.e. not `undefined`. 
+ // + // Example: + // + // message Foo { + // string bar = 1; + // string baz = 2 [(amino.dont_omitempty) = true]; + // } + // + // f := Foo{}; + // out := AminoJSONEncoder(&f); + // out == {"baz":""} + bool dont_omitempty = 11110005; +} \ No newline at end of file diff --git a/packages/codegen/proto/confio/LICENSE b/packages/codegen/proto/confio/LICENSE new file mode 100644 index 00000000..deaad1f5 --- /dev/null +++ b/packages/codegen/proto/confio/LICENSE @@ -0,0 +1,204 @@ +Confio/ICS23 +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Confio UO + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/packages/codegen/proto/confio/README.md b/packages/codegen/proto/confio/README.md new file mode 100644 index 00000000..af52fb63 --- /dev/null +++ b/packages/codegen/proto/confio/README.md @@ -0,0 +1 @@ +# confio \ No newline at end of file diff --git a/packages/codegen/proto/confio/proofs.proto b/packages/codegen/proto/confio/proofs.proto new file mode 100644 index 00000000..da43503e --- /dev/null +++ b/packages/codegen/proto/confio/proofs.proto @@ -0,0 +1,234 @@ +syntax = "proto3"; + +package ics23; +option go_package = "github.com/confio/ics23/go"; + +enum HashOp { + // NO_HASH is the default if no data passed. Note this is an illegal argument some places. + NO_HASH = 0; + SHA256 = 1; + SHA512 = 2; + KECCAK = 3; + RIPEMD160 = 4; + BITCOIN = 5; // ripemd160(sha256(x)) +} + +/** +LengthOp defines how to process the key and value of the LeafOp +to include length information. After encoding the length with the given +algorithm, the length will be prepended to the key and value bytes. +(Each one with it's own encoded length) +*/ +enum LengthOp { + // NO_PREFIX don't include any length info + NO_PREFIX = 0; + // VAR_PROTO uses protobuf (and go-amino) varint encoding of the length + VAR_PROTO = 1; + // VAR_RLP uses rlp int encoding of the length + VAR_RLP = 2; + // FIXED32_BIG uses big-endian encoding of the length as a 32 bit integer + FIXED32_BIG = 3; + // FIXED32_LITTLE uses little-endian encoding of the length as a 32 bit integer + FIXED32_LITTLE = 4; + // FIXED64_BIG uses big-endian encoding of the length as a 64 bit integer + FIXED64_BIG = 5; + // FIXED64_LITTLE uses little-endian encoding of the length as a 64 bit integer + FIXED64_LITTLE = 6; + // REQUIRE_32_BYTES is like NONE, but will fail if the input is not exactly 32 bytes (sha256 output) + REQUIRE_32_BYTES = 7; + // REQUIRE_64_BYTES is like NONE, but will fail if the input is not exactly 64 bytes (sha512 output) + REQUIRE_64_BYTES = 8; +} + +/** +ExistenceProof takes a key and a value and a set of steps to perform on it. +The result of peforming all these steps will provide a "root hash", which can +be compared to the value in a header. 
+ +Since it is computationally infeasible to produce a hash collission for any of the used +cryptographic hash functions, if someone can provide a series of operations to transform +a given key and value into a root hash that matches some trusted root, these key and values +must be in the referenced merkle tree. + +The only possible issue is maliablity in LeafOp, such as providing extra prefix data, +which should be controlled by a spec. Eg. with lengthOp as NONE, + prefix = FOO, key = BAR, value = CHOICE +and + prefix = F, key = OOBAR, value = CHOICE +would produce the same value. + +With LengthOp this is tricker but not impossible. Which is why the "leafPrefixEqual" field +in the ProofSpec is valuable to prevent this mutability. And why all trees should +length-prefix the data before hashing it. +*/ +message ExistenceProof { + bytes key = 1; + bytes value = 2; + LeafOp leaf = 3; + repeated InnerOp path = 4; +} + +/* +NonExistenceProof takes a proof of two neighbors, one left of the desired key, +one right of the desired key. If both proofs are valid AND they are neighbors, +then there is no valid proof for the given key. +*/ +message NonExistenceProof { + bytes key = 1; // TODO: remove this as unnecessary??? we prove a range + ExistenceProof left = 2; + ExistenceProof right = 3; +} + +/* +CommitmentProof is either an ExistenceProof or a NonExistenceProof, or a Batch of such messages +*/ +message CommitmentProof { + oneof proof { + ExistenceProof exist = 1; + NonExistenceProof nonexist = 2; + BatchProof batch = 3; + CompressedBatchProof compressed = 4; + } +} + +/** +LeafOp represents the raw key-value data we wish to prove, and +must be flexible to represent the internal transformation from +the original key-value pairs into the basis hash, for many existing +merkle trees. + +key and value are passed in. So that the signature of this operation is: + leafOp(key, value) -> output + +To process this, first prehash the keys and values if needed (ANY means no hash in this case): + hkey = prehashKey(key) + hvalue = prehashValue(value) + +Then combine the bytes, and hash it + output = hash(prefix || length(hkey) || hkey || length(hvalue) || hvalue) +*/ +message LeafOp { + HashOp hash = 1; + HashOp prehash_key = 2; + HashOp prehash_value = 3; + LengthOp length = 4; + // prefix is a fixed bytes that may optionally be included at the beginning to differentiate + // a leaf node from an inner node. + bytes prefix = 5; +} + +/** +InnerOp represents a merkle-proof step that is not a leaf. +It represents concatenating two children and hashing them to provide the next result. + +The result of the previous step is passed in, so the signature of this op is: + innerOp(child) -> output + +The result of applying InnerOp should be: + output = op.hash(op.prefix || child || op.suffix) + + where the || operator is concatenation of binary data, +and child is the result of hashing all the tree below this step. + +Any special data, like prepending child with the length, or prepending the entire operation with +some value to differentiate from leaf nodes, should be included in prefix and suffix. +If either of prefix or suffix is empty, we just treat it as an empty string +*/ +message InnerOp { + HashOp hash = 1; + bytes prefix = 2; + bytes suffix = 3; +} + + +/** +ProofSpec defines what the expected parameters are for a given proof type. +This can be stored in the client and used to validate any incoming proofs. 
+ + verify(ProofSpec, Proof) -> Proof | Error + +As demonstrated in tests, if we don't fix the algorithm used to calculate the +LeafHash for a given tree, there are many possible key-value pairs that can +generate a given hash (by interpretting the preimage differently). +We need this for proper security, requires client knows a priori what +tree format server uses. But not in code, rather a configuration object. +*/ +message ProofSpec { + // any field in the ExistenceProof must be the same as in this spec. + // except Prefix, which is just the first bytes of prefix (spec can be longer) + LeafOp leaf_spec = 1; + InnerSpec inner_spec = 2; + // max_depth (if > 0) is the maximum number of InnerOps allowed (mainly for fixed-depth tries) + int32 max_depth = 3; + // min_depth (if > 0) is the minimum number of InnerOps allowed (mainly for fixed-depth tries) + int32 min_depth = 4; +} + +/* +InnerSpec contains all store-specific structure info to determine if two proofs from a +given store are neighbors. + +This enables: + + isLeftMost(spec: InnerSpec, op: InnerOp) + isRightMost(spec: InnerSpec, op: InnerOp) + isLeftNeighbor(spec: InnerSpec, left: InnerOp, right: InnerOp) +*/ +message InnerSpec { + // Child order is the ordering of the children node, must count from 0 + // iavl tree is [0, 1] (left then right) + // merk is [0, 2, 1] (left, right, here) + repeated int32 child_order = 1; + int32 child_size = 2; + int32 min_prefix_length = 3; + int32 max_prefix_length = 4; + // empty child is the prehash image that is used when one child is nil (eg. 20 bytes of 0) + bytes empty_child = 5; + // hash is the algorithm that must be used for each InnerOp + HashOp hash = 6; +} + +/* +BatchProof is a group of multiple proof types than can be compressed +*/ +message BatchProof { + repeated BatchEntry entries = 1; +} + +// Use BatchEntry not CommitmentProof, to avoid recursion +message BatchEntry { + oneof proof { + ExistenceProof exist = 1; + NonExistenceProof nonexist = 2; + } +} + + +/****** all items here are compressed forms *******/ + +message CompressedBatchProof { + repeated CompressedBatchEntry entries = 1; + repeated InnerOp lookup_inners = 2; +} + +// Use BatchEntry not CommitmentProof, to avoid recursion +message CompressedBatchEntry { + oneof proof { + CompressedExistenceProof exist = 1; + CompressedNonExistenceProof nonexist = 2; + } +} + +message CompressedExistenceProof { + bytes key = 1; + bytes value = 2; + LeafOp leaf = 3; + // these are indexes into the lookup_inners table in CompressedBatchProof + repeated int32 path = 4; +} + +message CompressedNonExistenceProof { + bytes key = 1; // TODO: remove this as unnecessary??? we prove a range + CompressedExistenceProof left = 2; + CompressedExistenceProof right = 3; +} diff --git a/packages/codegen/proto/cosmos/LICENSE b/packages/codegen/proto/cosmos/LICENSE new file mode 100644 index 00000000..063e03fc --- /dev/null +++ b/packages/codegen/proto/cosmos/LICENSE @@ -0,0 +1,204 @@ +Cosmos SDK +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 All in Bits, Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/packages/codegen/proto/cosmos/README.md b/packages/codegen/proto/cosmos/README.md new file mode 100644 index 00000000..98a49c6b --- /dev/null +++ b/packages/codegen/proto/cosmos/README.md @@ -0,0 +1 @@ +# cosmos \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/app/v1alpha1/config.proto b/packages/codegen/proto/cosmos/app/v1alpha1/config.proto new file mode 100644 index 00000000..ed775006 --- /dev/null +++ b/packages/codegen/proto/cosmos/app/v1alpha1/config.proto @@ -0,0 +1,36 @@ +syntax = "proto3"; + +package cosmos.app.v1alpha1; + +import "google/protobuf/any.proto"; + +// Config represents the configuration for a Cosmos SDK ABCI app. +// It is intended that all state machine logic including the version of +// baseapp and tx handlers (and possibly even Tendermint) that an app needs +// can be described in a config object. For compatibility, the framework should +// allow a mixture of declarative and imperative app wiring, however, apps +// that strive for the maximum ease of maintainability should be able to describe +// their state machine with a config object alone. +message Config { + // modules are the module configurations for the app. + repeated ModuleConfig modules = 1; +} + +// ModuleConfig is a module configuration for an app. +message ModuleConfig { + // name is the unique name of the module within the app. It should be a name + // that persists between different versions of a module so that modules + // can be smoothly upgraded to new versions. + // + // For example, for the module cosmos.bank.module.v1.Module, we may chose + // to simply name the module "bank" in the app. When we upgrade to + // cosmos.bank.module.v2.Module, the app-specific name "bank" stays the same + // and the framework knows that the v2 module should receive all the same state + // that the v1 module had. Note: modules should provide info on which versions + // they can migrate from in the ModuleDescriptor.can_migration_from field. + string name = 1; + + // config is the config object for the module. Module config messages should + // define a ModuleDescriptor using the cosmos.app.v1alpha1.is_module extension. + google.protobuf.Any config = 2; +} diff --git a/packages/codegen/proto/cosmos/app/v1alpha1/module.proto b/packages/codegen/proto/cosmos/app/v1alpha1/module.proto new file mode 100644 index 00000000..599078d7 --- /dev/null +++ b/packages/codegen/proto/cosmos/app/v1alpha1/module.proto @@ -0,0 +1,93 @@ +syntax = "proto3"; + +package cosmos.app.v1alpha1; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.MessageOptions { + // module indicates that this proto type is a config object for an app module + // and optionally provides other descriptive information about the module. + // It is recommended that a new module config object and go module is versioned + // for every state machine breaking version of a module. The recommended + // pattern for doing this is to put module config objects in a separate proto + // package from the API they expose. Ex: the cosmos.group.v1 API would be + // exposed by module configs cosmos.group.module.v1, cosmos.group.module.v2, etc. + ModuleDescriptor module = 57193479; +} + +// ModuleDescriptor describes an app module. +message ModuleDescriptor { + // go_import names the package that should be imported by an app to load the + // module in the runtime module registry. 
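// Illustrative sketch (not part of the vendored proto above): a plain TypeScript mirror of
// cosmos.app.v1alpha1.Config / ModuleConfig showing how an app's module wiring could be
// described declaratively. The camelCase field names and the "/cosmos.bank.module.v1.Module"
// type URL follow the comments in config.proto and are assumptions about a concrete app,
// not definitions made by these files.
interface AnyProto {
  typeUrl: string;
  value: Uint8Array;
}

interface ModuleConfig {
  name: string;      // app-level module name, e.g. "bank"
  config?: AnyProto; // module config object packed as a google.protobuf.Any
}

interface AppConfig {
  modules: ModuleConfig[];
}

// A minimal app description: one "bank" module with an (empty) placeholder config.
const appConfig: AppConfig = {
  modules: [
    {
      name: "bank",
      config: { typeUrl: "/cosmos.bank.module.v1.Module", value: new Uint8Array() },
    },
  ],
};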
Either go_import must be defined here + // or the go_package option must be defined at the file level to indicate + // to users where to location the module implementation. go_import takes + // precedence over go_package when both are defined. + string go_import = 1; + + // use_package refers to a protobuf package that this module + // uses and exposes to the world. In an app, only one module should "use" + // or own a single protobuf package. It is assumed that the module uses + // all of the .proto files in a single package. + repeated PackageReference use_package = 2; + + // can_migrate_from defines which module versions this module can migrate + // state from. The framework will check that one module version is able to + // migrate from a previous module version before attempting to update its + // config. It is assumed that modules can transitively migrate from earlier + // versions. For instance if v3 declares it can migrate from v2, and v2 + // declares it can migrate from v1, the framework knows how to migrate + // from v1 to v3, assuming all 3 module versions are registered at runtime. + repeated MigrateFromInfo can_migrate_from = 3; +} + +// PackageReference is a reference to a protobuf package used by a module. +message PackageReference { + // name is the fully-qualified name of the package. + string name = 1; + + // revision is the optional revision of the package that is being used. + // Protobuf packages used in Cosmos should generally have a major version + // as the last part of the package name, ex. foo.bar.baz.v1. + // The revision of a package can be thought of as the minor version of a + // package which has additional backwards compatible definitions that weren't + // present in a previous version. + // + // A package should indicate its revision with a source code comment + // above the package declaration in one of its fields containing the + // test "Revision N" where N is an integer revision. All packages start + // at revision 0 the first time they are released in a module. + // + // When a new version of a module is released and items are added to existing + // .proto files, these definitions should contain comments of the form + // "Since Revision N" where N is an integer revision. + // + // When the module runtime starts up, it will check the pinned proto + // image and panic if there are runtime protobuf definitions that are not + // in the pinned descriptor which do not have + // a "Since Revision N" comment or have a "Since Revision N" comment where + // N is <= to the revision specified here. This indicates that the protobuf + // files have been updated, but the pinned file descriptor hasn't. + // + // If there are items in the pinned file descriptor with a revision + // greater than the value indicated here, this will also cause a panic + // as it may mean that the pinned descriptor for a legacy module has been + // improperly updated or that there is some other versioning discrepancy. + // Runtime protobuf definitions will also be checked for compatibility + // with pinned file descriptors to make sure there are no incompatible changes. + // + // This behavior ensures that: + // * pinned proto images are up-to-date + // * protobuf files are carefully annotated with revision comments which + // are important good client UX + // * protobuf files are changed in backwards and forwards compatible ways + uint32 revision = 2; +} + +// MigrateFromInfo is information on a module version that a newer module +// can migrate from. 
+message MigrateFromInfo { + + // module is the fully-qualified protobuf name of the module config object + // for the previous module version, ex: "cosmos.group.module.v1.Module". + string module = 1; +} diff --git a/packages/codegen/proto/cosmos/app/v1alpha1/query.proto b/packages/codegen/proto/cosmos/app/v1alpha1/query.proto new file mode 100644 index 00000000..efec9c81 --- /dev/null +++ b/packages/codegen/proto/cosmos/app/v1alpha1/query.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package cosmos.app.v1alpha1; + +import "cosmos/app/v1alpha1/config.proto"; + +// Query is the app module query service. +service Query { + + // Config returns the current app config. + rpc Config(QueryConfigRequest) returns (QueryConfigResponse) {} +} + +// QueryConfigRequest is the Query/Config request type. +message QueryConfigRequest {} + +// QueryConfigRequest is the Query/Config response type. +message QueryConfigResponse { + + // config is the current app config. + Config config = 1; +} diff --git a/packages/codegen/proto/cosmos/auth/v1beta1/auth.proto b/packages/codegen/proto/cosmos/auth/v1beta1/auth.proto new file mode 100644 index 00000000..486d507f --- /dev/null +++ b/packages/codegen/proto/cosmos/auth/v1beta1/auth.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; +package cosmos.auth.v1beta1; + +import "cosmos_proto/cosmos.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/auth/types"; + +// BaseAccount defines a base account type. It contains all the necessary fields +// for basic account functionality. Any custom account type should extend this +// type for additional functionality (e.g. vesting). +message BaseAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.equal) = false; + + option (cosmos_proto.implements_interface) = "cosmos.auth.AccountI"; + + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + google.protobuf.Any pub_key = 2 [(gogoproto.jsontag) = "public_key,omitempty"]; + uint64 account_number = 3; + uint64 sequence = 4; +} + +// ModuleAccount defines an account for modules that holds coins on a pool. +message ModuleAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + option (cosmos_proto.implements_interface) = "cosmos.auth.ModuleAccountI"; + + BaseAccount base_account = 1 [(gogoproto.embed) = true]; + string name = 2; + repeated string permissions = 3; +} + +// Params defines the parameters for the auth module. +message Params { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + uint64 max_memo_characters = 1; + uint64 tx_sig_limit = 2; + uint64 tx_size_cost_per_byte = 3; + uint64 sig_verify_cost_ed25519 = 4 [(gogoproto.customname) = "SigVerifyCostED25519"]; + uint64 sig_verify_cost_secp256k1 = 5 [(gogoproto.customname) = "SigVerifyCostSecp256k1"]; +} diff --git a/packages/codegen/proto/cosmos/auth/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/auth/v1beta1/genesis.proto new file mode 100644 index 00000000..c88b94ee --- /dev/null +++ b/packages/codegen/proto/cosmos/auth/v1beta1/genesis.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; +package cosmos.auth.v1beta1; + +import "google/protobuf/any.proto"; +import "gogoproto/gogo.proto"; +import "cosmos/auth/v1beta1/auth.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/auth/types"; + +// GenesisState defines the auth module's genesis state. 
+message GenesisState { + // params defines all the paramaters of the module. + Params params = 1 [(gogoproto.nullable) = false]; + + // accounts are the accounts present at genesis. + repeated google.protobuf.Any accounts = 2; +} diff --git a/packages/codegen/proto/cosmos/auth/v1beta1/query.proto b/packages/codegen/proto/cosmos/auth/v1beta1/query.proto new file mode 100644 index 00000000..8de4d09d --- /dev/null +++ b/packages/codegen/proto/cosmos/auth/v1beta1/query.proto @@ -0,0 +1,130 @@ +syntax = "proto3"; +package cosmos.auth.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; +import "cosmos/auth/v1beta1/auth.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/auth/types"; + +// Query defines the gRPC querier service. +service Query { + // Accounts returns all the existing accounts + // + // Since: cosmos-sdk 0.43 + rpc Accounts(QueryAccountsRequest) returns (QueryAccountsResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/accounts"; + } + + // Account returns account details based on address. + rpc Account(QueryAccountRequest) returns (QueryAccountResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/accounts/{address}"; + } + + // Params queries all parameters. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/params"; + } + + // ModuleAccounts returns all the existing module accounts. + rpc ModuleAccounts(QueryModuleAccountsRequest) returns (QueryModuleAccountsResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/module_accounts"; + } + + // Bech32 queries bech32Prefix + rpc Bech32Prefix(Bech32PrefixRequest) returns (Bech32PrefixResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/bech32"; + } + + // AddressBytesToString converts Account Address bytes to string + rpc AddressBytesToString(AddressBytesToStringRequest) returns (AddressBytesToStringResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/bech32/{address_bytes}"; + } + + // AddressStringToBytes converts Address string to bytes + rpc AddressStringToBytes(AddressStringToBytesRequest) returns (AddressStringToBytesResponse) { + option (google.api.http).get = "/cosmos/auth/v1beta1/bech32/{address_string}"; + } +} + +// QueryAccountsRequest is the request type for the Query/Accounts RPC method. +// +// Since: cosmos-sdk 0.43 +message QueryAccountsRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryAccountsResponse is the response type for the Query/Accounts RPC method. +// +// Since: cosmos-sdk 0.43 +message QueryAccountsResponse { + // accounts are the existing accounts + repeated google.protobuf.Any accounts = 1 [(cosmos_proto.accepts_interface) = "cosmos.auth.AccountI"]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryAccountRequest is the request type for the Query/Account RPC method. +message QueryAccountRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address defines the address to query for. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. 
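// Illustrative sketch (not part of the vendored proto above): querying the auth module over
// the REST routes declared by the google.api.http annotations in query.proto, using plain
// fetch. The endpoint and address below are placeholders, and the JSON shape
// ({ account: ... }) assumes the usual gRPC-gateway rendering of QueryAccountResponse.
async function fetchAccount(lcdEndpoint: string, address: string): Promise<unknown> {
  const res = await fetch(`${lcdEndpoint}/cosmos/auth/v1beta1/accounts/${address}`);
  if (!res.ok) {
    throw new Error(`account query failed with HTTP ${res.status}`);
  }
  const body = await res.json();
  return body.account; // the google.protobuf.Any account, rendered as JSON
}

// Usage with placeholder values:
// fetchAccount("https://lcd.example.com", "cosmos1...").then(console.log);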
+message QueryModuleAccountsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryAccountResponse is the response type for the Query/Account RPC method. +message QueryAccountResponse { + // account defines the account of the corresponding address. + google.protobuf.Any account = 1 [(cosmos_proto.accepts_interface) = "cosmos.auth.AccountI"]; +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. +message QueryModuleAccountsResponse { + repeated google.protobuf.Any accounts = 1 [(cosmos_proto.accepts_interface) = "cosmos.auth.ModuleAccountI"]; +} + +// Bech32PrefixRequest is the request type for Bech32Prefix rpc method +message Bech32PrefixRequest {} + +// Bech32PrefixResponse is the response type for Bech32Prefix rpc method +message Bech32PrefixResponse { + string bech32_prefix = 1; +} + +// AddressBytesToStringRequest is the request type for AddressString rpc method +message AddressBytesToStringRequest { + bytes address_bytes = 1; +} + +// AddressBytesToStringResponse is the response type for AddressString rpc method +message AddressBytesToStringResponse { + string address_string = 1; +} + +// AddressStringToBytesRequest is the request type for AccountBytes rpc method +message AddressStringToBytesRequest { + string address_string = 1; +} + +// AddressStringToBytesResponse is the response type for AddressBytes rpc method +message AddressStringToBytesResponse { + bytes address_bytes = 1; +} diff --git a/packages/codegen/proto/cosmos/authz/v1beta1/authz.proto b/packages/codegen/proto/cosmos/authz/v1beta1/authz.proto new file mode 100644 index 00000000..2dce1ce0 --- /dev/null +++ b/packages/codegen/proto/cosmos/authz/v1beta1/authz.proto @@ -0,0 +1,46 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.authz.v1beta1; + +import "cosmos_proto/cosmos.proto"; +import "google/protobuf/timestamp.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/authz"; +option (gogoproto.goproto_getters_all) = false; + +// GenericAuthorization gives the grantee unrestricted permissions to execute +// the provided method on behalf of the granter's account. +message GenericAuthorization { + option (cosmos_proto.implements_interface) = "cosmos.authz.Authorization"; + + // Msg, identified by it's type URL, to grant unrestricted permissions to execute + string msg = 1; +} + +// Grant gives permissions to execute +// the provide method with expiration time. +message Grant { + google.protobuf.Any authorization = 1 [(cosmos_proto.accepts_interface) = "cosmos.authz.Authorization"]; + // time when the grant will expire and will be pruned. If null, then the grant + // doesn't have a time expiration (other conditions in `authorization` + // may apply to invalidate the grant) + google.protobuf.Timestamp expiration = 2 [(gogoproto.stdtime) = true, (gogoproto.nullable) = true]; +} + +// GrantAuthorization extends a grant with both the addresses of the grantee and granter. 
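// Illustrative sketch (not part of the vendored proto above): the plain-object shape of a
// Grant carrying a GenericAuthorization, mirroring the messages in authz.proto with
// hand-written types. Modelling the packed authorization as { typeUrl, value } and the
// expiration as a Date are assumptions about the generated code, not definitions made here.
interface PackedAny {
  typeUrl: string;
  value: Uint8Array;
}

interface Grant {
  authorization?: PackedAny; // e.g. an encoded GenericAuthorization
  expiration?: Date;         // omitted => the grant never expires by time
}

// GenericAuthorization only names the Msg type URL the grantee may execute.
const grant: Grant = {
  authorization: {
    typeUrl: "/cosmos.authz.v1beta1.GenericAuthorization",
    // The value would be the protobuf-encoded GenericAuthorization
    // { msg: "/cosmos.bank.v1beta1.MsgSend" }; left empty since this sketch has no encoder.
    value: new Uint8Array(),
  },
  expiration: new Date("2030-01-01T00:00:00Z"),
};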
+// It is used in genesis.proto and query.proto +message GrantAuthorization { + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + google.protobuf.Any authorization = 3 [(cosmos_proto.accepts_interface) = "cosmos.authz.Authorization"]; + google.protobuf.Timestamp expiration = 4 [(gogoproto.stdtime) = true]; +} + +// GrantQueueItem contains the list of TypeURL of a sdk.Msg. +message GrantQueueItem { + // msg_type_urls contains the list of TypeURL of a sdk.Msg. + repeated string msg_type_urls = 1; +} diff --git a/packages/codegen/proto/cosmos/authz/v1beta1/event.proto b/packages/codegen/proto/cosmos/authz/v1beta1/event.proto new file mode 100644 index 00000000..0476649a --- /dev/null +++ b/packages/codegen/proto/cosmos/authz/v1beta1/event.proto @@ -0,0 +1,27 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.authz.v1beta1; + +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/authz"; + +// EventGrant is emitted on Msg/Grant +message EventGrant { + // Msg type URL for which an autorization is granted + string msg_type_url = 2; + // Granter account address + string granter = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Grantee account address + string grantee = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// EventRevoke is emitted on Msg/Revoke +message EventRevoke { + // Msg type URL for which an autorization is revoked + string msg_type_url = 2; + // Granter account address + string granter = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Grantee account address + string grantee = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} diff --git a/packages/codegen/proto/cosmos/authz/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/authz/v1beta1/genesis.proto new file mode 100644 index 00000000..310f6265 --- /dev/null +++ b/packages/codegen/proto/cosmos/authz/v1beta1/genesis.proto @@ -0,0 +1,13 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.authz.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/authz/v1beta1/authz.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/authz"; + +// GenesisState defines the authz module's genesis state. +message GenesisState { + repeated GrantAuthorization authorization = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/authz/v1beta1/query.proto b/packages/codegen/proto/cosmos/authz/v1beta1/query.proto new file mode 100644 index 00000000..62154ac1 --- /dev/null +++ b/packages/codegen/proto/cosmos/authz/v1beta1/query.proto @@ -0,0 +1,82 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.authz.v1beta1; + +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "cosmos/authz/v1beta1/authz.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/authz"; + +// Query defines the gRPC querier service. +service Query { + // Returns list of `Authorization`, granted to the grantee by the granter. + rpc Grants(QueryGrantsRequest) returns (QueryGrantsResponse) { + option (google.api.http).get = "/cosmos/authz/v1beta1/grants"; + } + + // GranterGrants returns list of `GrantAuthorization`, granted by granter. 
+ // + // Since: cosmos-sdk 0.46 + rpc GranterGrants(QueryGranterGrantsRequest) returns (QueryGranterGrantsResponse) { + option (google.api.http).get = "/cosmos/authz/v1beta1/grants/granter/{granter}"; + } + + // GranteeGrants returns a list of `GrantAuthorization` by grantee. + // + // Since: cosmos-sdk 0.46 + rpc GranteeGrants(QueryGranteeGrantsRequest) returns (QueryGranteeGrantsResponse) { + option (google.api.http).get = "/cosmos/authz/v1beta1/grants/grantee/{grantee}"; + } +} + +// QueryGrantsRequest is the request type for the Query/Grants RPC method. +message QueryGrantsRequest { + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Optional, msg_type_url, when set, will query only grants matching given msg type. + string msg_type_url = 3; + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 4; +} + +// QueryGrantsResponse is the response type for the Query/Authorizations RPC method. +message QueryGrantsResponse { + // authorizations is a list of grants granted for grantee by granter. + repeated Grant grants = 1; + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGranterGrantsRequest is the request type for the Query/GranterGrants RPC method. +message QueryGranterGrantsRequest { + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGranterGrantsResponse is the response type for the Query/GranterGrants RPC method. +message QueryGranterGrantsResponse { + // grants is a list of grants granted by the granter. + repeated GrantAuthorization grants = 1; + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGranteeGrantsRequest is the request type for the Query/IssuedGrants RPC method. +message QueryGranteeGrantsRequest { + string grantee = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGranteeGrantsResponse is the response type for the Query/GranteeGrants RPC method. +message QueryGranteeGrantsResponse { + // grants is a list of grants granted to the grantee. + repeated GrantAuthorization grants = 1; + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/authz/v1beta1/tx.proto b/packages/codegen/proto/cosmos/authz/v1beta1/tx.proto new file mode 100644 index 00000000..9c8ae160 --- /dev/null +++ b/packages/codegen/proto/cosmos/authz/v1beta1/tx.proto @@ -0,0 +1,75 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.authz.v1beta1; + +import "cosmos_proto/cosmos.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "cosmos/authz/v1beta1/authz.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/authz"; +option (gogoproto.goproto_getters_all) = false; + +// Msg defines the authz Msg service. +service Msg { + // Grant grants the provided authorization to the grantee on the granter's + // account with the provided expiration time. 
If there is already a grant + // for the given (granter, grantee, Authorization) triple, then the grant + // will be overwritten. + rpc Grant(MsgGrant) returns (MsgGrantResponse); + + // Exec attempts to execute the provided messages using + // authorizations granted to the grantee. Each message should have only + // one signer corresponding to the granter of the authorization. + rpc Exec(MsgExec) returns (MsgExecResponse); + + // Revoke revokes any authorization corresponding to the provided method name on the + // granter's account that has been granted to the grantee. + rpc Revoke(MsgRevoke) returns (MsgRevokeResponse); +} + +// MsgGrant is a request type for Grant method. It declares authorization to the grantee +// on behalf of the granter with the provided expiration time. +message MsgGrant { + option (cosmos.msg.v1.signer) = "granter"; + + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + cosmos.authz.v1beta1.Grant grant = 3 [(gogoproto.nullable) = false]; +} + +// MsgExecResponse defines the Msg/MsgExecResponse response type. +message MsgExecResponse { + repeated bytes results = 1; +} + +// MsgExec attempts to execute the provided messages using +// authorizations granted to the grantee. Each message should have only +// one signer corresponding to the granter of the authorization. +message MsgExec { + option (cosmos.msg.v1.signer) = "grantee"; + + string grantee = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Authorization Msg requests to execute. Each msg must implement Authorization interface + // The x/authz will try to find a grant matching (msg.signers[0], grantee, MsgTypeURL(msg)) + // triple and validate it. + repeated google.protobuf.Any msgs = 2 [(cosmos_proto.accepts_interface) = "sdk.Msg, cosmos.authz.Authorization"]; +} + +// MsgGrantResponse defines the Msg/MsgGrant response type. +message MsgGrantResponse {} + +// MsgRevoke revokes any authorization with the provided sdk.Msg type on the +// granter's account with that has been granted to the grantee. +message MsgRevoke { + option (cosmos.msg.v1.signer) = "granter"; + + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string msg_type_url = 3; +} + +// MsgRevokeResponse defines the Msg/MsgRevokeResponse response type. +message MsgRevokeResponse {} diff --git a/packages/codegen/proto/cosmos/bank/v1beta1/authz.proto b/packages/codegen/proto/cosmos/bank/v1beta1/authz.proto new file mode 100644 index 00000000..e3e600b4 --- /dev/null +++ b/packages/codegen/proto/cosmos/bank/v1beta1/authz.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; +package cosmos.bank.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/bank/types"; + +// SendAuthorization allows the grantee to spend up to spend_limit coins from +// the granter's account. 
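// Illustrative sketch (not part of the vendored proto above): a MsgGrant-shaped object
// handing a grantee a bank SendAuthorization with a spend limit, combining MsgGrant from
// authz tx.proto with SendAuthorization from bank authz.proto below. The addresses are
// placeholders and the { typeUrl, value } modelling of the packed authorization is an
// assumption; a real call would protobuf-encode the SendAuthorization into `value` and
// sign/broadcast the message under "/cosmos.authz.v1beta1.MsgGrant".
interface MsgGrant {
  granter: string;
  grantee: string;
  grant: {
    authorization?: { typeUrl: string; value: Uint8Array };
    expiration?: Date;
  };
}

const msgGrant: MsgGrant = {
  granter: "cosmos1granter...", // placeholder bech32 address
  grantee: "cosmos1grantee...", // placeholder bech32 address
  grant: {
    authorization: {
      typeUrl: "/cosmos.bank.v1beta1.SendAuthorization",
      // Would hold the encoded SendAuthorization, e.g. spend_limit = [{ denom: "uatom", amount: "1000000" }].
      value: new Uint8Array(),
    },
    expiration: new Date("2030-01-01T00:00:00Z"),
  },
};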
+// +// Since: cosmos-sdk 0.43 +message SendAuthorization { + option (cosmos_proto.implements_interface) = "cosmos.authz.Authorization"; + + repeated cosmos.base.v1beta1.Coin spend_limit = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} diff --git a/packages/codegen/proto/cosmos/bank/v1beta1/bank.proto b/packages/codegen/proto/cosmos/bank/v1beta1/bank.proto new file mode 100644 index 00000000..f70c24ab --- /dev/null +++ b/packages/codegen/proto/cosmos/bank/v1beta1/bank.proto @@ -0,0 +1,108 @@ +syntax = "proto3"; +package cosmos.bank.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/bank/types"; + +// Params defines the parameters for the bank module. +message Params { + option (gogoproto.goproto_stringer) = false; + repeated SendEnabled send_enabled = 1; + bool default_send_enabled = 2; +} + +// SendEnabled maps coin denom to a send_enabled status (whether a denom is +// sendable). +message SendEnabled { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + string denom = 1; + bool enabled = 2; +} + +// Input models transaction input. +message Input { + option (cosmos.msg.v1.signer) = "address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin coins = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// Output models transaction outputs. +message Output { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin coins = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// Supply represents a struct that passively keeps track of the total supply +// amounts in the network. +// This message is deprecated now that supply is indexed by denom. +message Supply { + option deprecated = true; + + option (gogoproto.equal) = true; + option (gogoproto.goproto_getters) = false; + + option (cosmos_proto.implements_interface) = "SupplyI"; + + repeated cosmos.base.v1beta1.Coin total = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// DenomUnit represents a struct that describes a given +// denomination unit of the basic token. +message DenomUnit { + // denom represents the string name of the given denom unit (e.g uatom). + string denom = 1; + // exponent represents power of 10 exponent that one must + // raise the base_denom to in order to equal the given DenomUnit's denom + // 1 denom = 10^exponent base_denom + // (e.g. with a base_denom of uatom, one can create a DenomUnit of 'atom' with + // exponent = 6, thus: 1 atom = 10^6 uatom). + uint32 exponent = 2; + // aliases is a list of string aliases for the given denom + repeated string aliases = 3; +} + +// Metadata represents a struct that describes +// a basic token. +message Metadata { + string description = 1; + // denom_units represents the list of DenomUnit's for a given coin + repeated DenomUnit denom_units = 2; + // base represents the base denom (should be the DenomUnit with exponent = 0). 
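// Illustrative sketch (not part of the vendored proto above): converting a base-denom amount
// into its display denomination using the DenomUnit.exponent rule documented in bank.proto
// (1 denom = 10^exponent base_denom). The interfaces are hand-written stand-ins for the
// generated Metadata / DenomUnit types, and the uatom/atom literal follows the example given
// in the comments.
interface DenomUnit {
  denom: string;
  exponent: number;
  aliases: string[];
}

interface DenomMetadata {
  base: string;           // the DenomUnit with exponent = 0
  display: string;        // suggested denom for clients
  denomUnits: DenomUnit[];
}

function toDisplayAmount(meta: DenomMetadata, baseAmount: bigint): string {
  const displayUnit = meta.denomUnits.find((u) => u.denom === meta.display);
  if (!displayUnit) {
    throw new Error(`display unit ${meta.display} not found in metadata`);
  }
  const factor = 10n ** BigInt(displayUnit.exponent);
  const whole = baseAmount / factor;
  const frac = (baseAmount % factor).toString().padStart(displayUnit.exponent, "0");
  return `${whole}.${frac} ${meta.display}`;
}

const atomMetadata: DenomMetadata = {
  base: "uatom",
  display: "atom",
  denomUnits: [
    { denom: "uatom", exponent: 0, aliases: ["microatom"] },
    { denom: "atom", exponent: 6, aliases: [] },
  ],
};

// 1234567 uatom => "1.234567 atom"
console.log(toDisplayAmount(atomMetadata, 1234567n));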
+ string base = 3; + // display indicates the suggested denom that should be + // displayed in clients. + string display = 4; + // name defines the name of the token (eg: Cosmos Atom) + // + // Since: cosmos-sdk 0.43 + string name = 5; + // symbol is the token symbol usually shown on exchanges (eg: ATOM). This can + // be the same as the display. + // + // Since: cosmos-sdk 0.43 + string symbol = 6; + // URI to a document (on or off-chain) that contains additional information. Optional. + // + // Since: cosmos-sdk 0.46 + string uri = 7 [(gogoproto.customname) = "URI"]; + // URIHash is a sha256 hash of a document pointed by URI. It's used to verify that + // the document didn't change. Optional. + // + // Since: cosmos-sdk 0.46 + string uri_hash = 8 [(gogoproto.customname) = "URIHash"]; +} diff --git a/packages/codegen/proto/cosmos/bank/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/bank/v1beta1/genesis.proto new file mode 100644 index 00000000..aa35790b --- /dev/null +++ b/packages/codegen/proto/cosmos/bank/v1beta1/genesis.proto @@ -0,0 +1,40 @@ +syntax = "proto3"; +package cosmos.bank.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/bank/v1beta1/bank.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/bank/types"; + +// GenesisState defines the bank module's genesis state. +message GenesisState { + // params defines all the paramaters of the module. + Params params = 1 [(gogoproto.nullable) = false]; + + // balances is an array containing the balances of all the accounts. + repeated Balance balances = 2 [(gogoproto.nullable) = false]; + + // supply represents the total supply. If it is left empty, then supply will be calculated based on the provided + // balances. Otherwise, it will be used to validate that the sum of the balances equals this amount. + repeated cosmos.base.v1beta1.Coin supply = 3 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", (gogoproto.nullable) = false]; + + // denom_metadata defines the metadata of the differents coins. + repeated Metadata denom_metadata = 4 [(gogoproto.nullable) = false]; +} + +// Balance defines an account address and balance pair used in the bank module's +// genesis state. +message Balance { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address is the address of the balance holder. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // coins defines the different coins this balance holds. + repeated cosmos.base.v1beta1.Coin coins = 2 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", (gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/bank/v1beta1/query.proto b/packages/codegen/proto/cosmos/bank/v1beta1/query.proto new file mode 100644 index 00000000..cbe7f38a --- /dev/null +++ b/packages/codegen/proto/cosmos/bank/v1beta1/query.proto @@ -0,0 +1,231 @@ +syntax = "proto3"; +package cosmos.bank.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/bank/v1beta1/bank.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/bank/types"; + +// Query defines the gRPC querier service. +service Query { + // Balance queries the balance of a single coin for a single account. 
+ rpc Balance(QueryBalanceRequest) returns (QueryBalanceResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/balances/{address}/by_denom"; + } + + // AllBalances queries the balance of all coins for a single account. + rpc AllBalances(QueryAllBalancesRequest) returns (QueryAllBalancesResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/balances/{address}"; + } + + // SpendableBalances queries the spenable balance of all coins for a single + // account. + rpc SpendableBalances(QuerySpendableBalancesRequest) returns (QuerySpendableBalancesResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/spendable_balances/{address}"; + } + + // TotalSupply queries the total supply of all coins. + rpc TotalSupply(QueryTotalSupplyRequest) returns (QueryTotalSupplyResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/supply"; + } + + // SupplyOf queries the supply of a single coin. + rpc SupplyOf(QuerySupplyOfRequest) returns (QuerySupplyOfResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/supply/by_denom"; + } + + // Params queries the parameters of x/bank module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/params"; + } + + // DenomsMetadata queries the client metadata of a given coin denomination. + rpc DenomMetadata(QueryDenomMetadataRequest) returns (QueryDenomMetadataResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/denoms_metadata/{denom}"; + } + + // DenomsMetadata queries the client metadata for all registered coin + // denominations. + rpc DenomsMetadata(QueryDenomsMetadataRequest) returns (QueryDenomsMetadataResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/denoms_metadata"; + } + + // DenomOwners queries for all account addresses that own a particular token + // denomination. + rpc DenomOwners(QueryDenomOwnersRequest) returns (QueryDenomOwnersResponse) { + option (google.api.http).get = "/cosmos/bank/v1beta1/denom_owners/{denom}"; + } +} + +// QueryBalanceRequest is the request type for the Query/Balance RPC method. +message QueryBalanceRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address is the address to query balances for. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // denom is the coin denom to query balances for. + string denom = 2; +} + +// QueryBalanceResponse is the response type for the Query/Balance RPC method. +message QueryBalanceResponse { + // balance is the balance of the coin. + cosmos.base.v1beta1.Coin balance = 1; +} + +// QueryBalanceRequest is the request type for the Query/AllBalances RPC method. +message QueryAllBalancesRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address is the address to query balances for. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryAllBalancesResponse is the response type for the Query/AllBalances RPC +// method. +message QueryAllBalancesResponse { + // balances is the balances of all the coins. + repeated cosmos.base.v1beta1.Coin balances = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // pagination defines the pagination in the response. 
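// Illustrative sketch (not part of the vendored proto above): reading a single balance over
// the REST route declared above ("/cosmos/bank/v1beta1/balances/{address}/by_denom").
// Passing `denom` as a query-string parameter and the { balance: { denom, amount } } JSON
// shape follow common gRPC-gateway conventions and are assumptions here; the endpoint and
// address are placeholders.
async function fetchBalance(
  lcdEndpoint: string,
  address: string,
  denom: string
): Promise<{ denom: string; amount: string } | undefined> {
  const url =
    `${lcdEndpoint}/cosmos/bank/v1beta1/balances/${address}/by_denom` +
    `?denom=${encodeURIComponent(denom)}`;
  const res = await fetch(url);
  if (!res.ok) {
    throw new Error(`balance query failed with HTTP ${res.status}`);
  }
  const body = await res.json();
  return body.balance;
}

// Usage with placeholder values:
// fetchBalance("https://lcd.example.com", "cosmos1...", "uatom").then(console.log);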
+ cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QuerySpendableBalancesRequest defines the gRPC request structure for querying +// an account's spendable balances. +message QuerySpendableBalancesRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address is the address to query spendable balances for. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QuerySpendableBalancesResponse defines the gRPC response structure for querying +// an account's spendable balances. +message QuerySpendableBalancesResponse { + // balances is the spendable balances of all the coins. + repeated cosmos.base.v1beta1.Coin balances = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC +// method. +message QueryTotalSupplyRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // pagination defines an optional pagination for the request. + // + // Since: cosmos-sdk 0.43 + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC +// method +message QueryTotalSupplyResponse { + // supply is the supply of the coins + repeated cosmos.base.v1beta1.Coin supply = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // pagination defines the pagination in the response. + // + // Since: cosmos-sdk 0.43 + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. +message QuerySupplyOfRequest { + // denom is the coin denom to query balances for. + string denom = 1; +} + +// QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. +message QuerySupplyOfResponse { + // amount is the supply of the coin. + cosmos.base.v1beta1.Coin amount = 1 [(gogoproto.nullable) = false]; +} + +// QueryParamsRequest defines the request type for querying x/bank parameters. +message QueryParamsRequest {} + +// QueryParamsResponse defines the response type for querying x/bank parameters. +message QueryParamsResponse { + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. +message QueryDenomsMetadataRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC +// method. +message QueryDenomsMetadataResponse { + // metadata provides the client information for all the registered tokens. + repeated Metadata metadatas = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. +message QueryDenomMetadataRequest { + // denom is the coin denom to query the metadata for. 
+ string denom = 1; +} + +// QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC +// method. +message QueryDenomMetadataResponse { + // metadata describes and provides all the client information for the requested token. + Metadata metadata = 1 [(gogoproto.nullable) = false]; +} + +// QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, +// which queries for a paginated set of all account holders of a particular +// denomination. +message QueryDenomOwnersRequest { + // denom defines the coin denomination to query all account holders for. + string denom = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// DenomOwner defines structure representing an account that owns or holds a +// particular denominated token. It contains the account address and account +// balance of the denominated token. +message DenomOwner { + // address defines the address that owns a particular denomination. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // balance is the balance of the denominated coin for an account. + cosmos.base.v1beta1.Coin balance = 2 [(gogoproto.nullable) = false]; +} + +// QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. +message QueryDenomOwnersResponse { + repeated DenomOwner denom_owners = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/bank/v1beta1/tx.proto b/packages/codegen/proto/cosmos/bank/v1beta1/tx.proto new file mode 100644 index 00000000..22e62cbf --- /dev/null +++ b/packages/codegen/proto/cosmos/bank/v1beta1/tx.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; +package cosmos.bank.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/bank/v1beta1/bank.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/bank/types"; + +// Msg defines the bank Msg service. +service Msg { + // Send defines a method for sending coins from one account to another account. + rpc Send(MsgSend) returns (MsgSendResponse); + + // MultiSend defines a method for sending coins from some accounts to other accounts. + rpc MultiSend(MsgMultiSend) returns (MsgMultiSendResponse); +} + +// MsgSend represents a message to send coins from one account to another. +message MsgSend { + option (cosmos.msg.v1.signer) = "from_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string from_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string to_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// MsgSendResponse defines the Msg/Send response type. +message MsgSendResponse {} + +// MsgMultiSend represents an arbitrary multi-in, multi-out send message. +message MsgMultiSend { + option (cosmos.msg.v1.signer) = "inputs"; + + option (gogoproto.equal) = false; + + repeated Input inputs = 1 [(gogoproto.nullable) = false]; + repeated Output outputs = 2 [(gogoproto.nullable) = false]; +} + +// MsgMultiSendResponse defines the Msg/MultiSend response type. 
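// Illustrative sketch (not part of the vendored proto above): the MsgSend shape defined in
// bank tx.proto, expressed as the kind of { typeUrl, value } pair a signing client
// broadcasts. The addresses are placeholders, and the camelCase field names and the wrapper
// object are assumptions about the generated types rather than part of this file.
interface Coin {
  denom: string;
  amount: string;
}

interface MsgSend {
  fromAddress: string;
  toAddress: string;
  amount: Coin[];
}

const msgSend: MsgSend = {
  fromAddress: "cosmos1sender...",  // placeholder bech32 address
  toAddress: "cosmos1recipient...", // placeholder bech32 address
  amount: [{ denom: "uatom", amount: "250000" }],
};

// A signing client would take this message under its type URL, encode it, and include it in
// a transaction for signing and broadcasting.
const sendMsgEncodeObject = { typeUrl: "/cosmos.bank.v1beta1.MsgSend", value: msgSend };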
+message MsgMultiSendResponse {} diff --git a/packages/codegen/proto/cosmos/base/abci/v1beta1/abci.proto b/packages/codegen/proto/cosmos/base/abci/v1beta1/abci.proto new file mode 100644 index 00000000..09a2fcc4 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/abci/v1beta1/abci.proto @@ -0,0 +1,158 @@ +syntax = "proto3"; +package cosmos.base.abci.v1beta1; + +import "gogoproto/gogo.proto"; +import "tendermint/abci/types.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types"; +option (gogoproto.goproto_stringer_all) = false; + +// TxResponse defines a structure containing relevant tx data and metadata. The +// tags are stringified and the log is JSON decoded. +message TxResponse { + option (gogoproto.goproto_getters) = false; + // The block height + int64 height = 1; + // The transaction hash. + string txhash = 2 [(gogoproto.customname) = "TxHash"]; + // Namespace for the Code + string codespace = 3; + // Response code. + uint32 code = 4; + // Result bytes, if any. + string data = 5; + // The output of the application's logger (raw string). May be + // non-deterministic. + string raw_log = 6; + // The output of the application's logger (typed). May be non-deterministic. + repeated ABCIMessageLog logs = 7 [(gogoproto.castrepeated) = "ABCIMessageLogs", (gogoproto.nullable) = false]; + // Additional information. May be non-deterministic. + string info = 8; + // Amount of gas requested for transaction. + int64 gas_wanted = 9; + // Amount of gas consumed by transaction. + int64 gas_used = 10; + // The request transaction bytes. + google.protobuf.Any tx = 11; + // Time of the previous block. For heights > 1, it's the weighted median of + // the timestamps of the valid votes in the block.LastCommit. For height == 1, + // it's genesis time. + string timestamp = 12; + // Events defines all the events emitted by processing a transaction. Note, + // these events include those emitted by processing all the messages and those + // emitted from the ante handler. Whereas Logs contains the events, with + // additional metadata, emitted only by processing the messages. + // + // Since: cosmos-sdk 0.42.11, 0.44.5, 0.45 + repeated tendermint.abci.Event events = 13 [(gogoproto.nullable) = false]; +} + +// ABCIMessageLog defines a structure containing an indexed tx ABCI message log. +message ABCIMessageLog { + option (gogoproto.stringer) = true; + + uint32 msg_index = 1 [(gogoproto.jsontag) = "msg_index"]; + string log = 2; + + // Events contains a slice of Event objects that were emitted during some + // execution. + repeated StringEvent events = 3 [(gogoproto.castrepeated) = "StringEvents", (gogoproto.nullable) = false]; +} + +// StringEvent defines en Event object wrapper where all the attributes +// contain key/value pairs that are strings instead of raw bytes. +message StringEvent { + option (gogoproto.stringer) = true; + + string type = 1; + repeated Attribute attributes = 2 [(gogoproto.nullable) = false]; +} + +// Attribute defines an attribute wrapper where the key and value are +// strings instead of raw bytes. +message Attribute { + string key = 1; + string value = 2; +} + +// GasInfo defines tx execution gas context. +message GasInfo { + // GasWanted is the maximum units of work we allow this tx to perform. + uint64 gas_wanted = 1; + + // GasUsed is the amount of gas actually consumed. + uint64 gas_used = 2; +} + +// Result is the union of ResponseFormat and ResponseCheckTx. 
+message Result { + option (gogoproto.goproto_getters) = false; + + // Data is any data returned from message or handler execution. It MUST be + // length prefixed in order to separate data from multiple message executions. + // Deprecated. This field is still populated, but prefer msg_response instead + // because it also contains the Msg response typeURL. + bytes data = 1 [deprecated = true]; + + // Log contains the log information from message or handler execution. + string log = 2; + + // Events contains a slice of Event objects that were emitted during message + // or handler execution. + repeated tendermint.abci.Event events = 3 [(gogoproto.nullable) = false]; + + // msg_responses contains the Msg handler responses type packed in Anys. + // + // Since: cosmos-sdk 0.46 + repeated google.protobuf.Any msg_responses = 4; +} + +// SimulationResponse defines the response generated when a transaction is +// successfully simulated. +message SimulationResponse { + GasInfo gas_info = 1 [(gogoproto.embed) = true, (gogoproto.nullable) = false]; + Result result = 2; +} + +// MsgData defines the data returned in a Result object during message +// execution. +message MsgData { + option deprecated = true; + option (gogoproto.stringer) = true; + + string msg_type = 1; + bytes data = 2; +} + +// TxMsgData defines a list of MsgData. A transaction will have a MsgData object +// for each message. +message TxMsgData { + option (gogoproto.stringer) = true; + + // data field is deprecated and not populated. + repeated MsgData data = 1 [deprecated = true]; + + // msg_responses contains the Msg handler responses packed into Anys. + // + // Since: cosmos-sdk 0.46 + repeated google.protobuf.Any msg_responses = 2; +} + +// SearchTxsResult defines a structure for querying txs pageable +message SearchTxsResult { + option (gogoproto.stringer) = true; + + // Count of all txs + uint64 total_count = 1; + // Count of txs in current page + uint64 count = 2; + // Index of current page, start from 1 + uint64 page_number = 3; + // Count of total pages + uint64 page_total = 4; + // Max count txs per page + uint64 limit = 5; + // List of txs in current page + repeated TxResponse txs = 6; +} diff --git a/packages/codegen/proto/cosmos/base/kv/v1beta1/kv.proto b/packages/codegen/proto/cosmos/base/kv/v1beta1/kv.proto new file mode 100644 index 00000000..4e9b8d28 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/kv/v1beta1/kv.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; +package cosmos.base.kv.v1beta1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types/kv"; + +// Pairs defines a repeated slice of Pair objects. +message Pairs { + repeated Pair pairs = 1 [(gogoproto.nullable) = false]; +} + +// Pair defines a key/value bytes tuple. +message Pair { + bytes key = 1; + bytes value = 2; +} diff --git a/packages/codegen/proto/cosmos/base/query/v1beta1/pagination.proto b/packages/codegen/proto/cosmos/base/query/v1beta1/pagination.proto new file mode 100644 index 00000000..0a368144 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/query/v1beta1/pagination.proto @@ -0,0 +1,56 @@ +syntax = "proto3"; +package cosmos.base.query.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/types/query"; + +// PageRequest is to be embedded in gRPC request messages for efficient +// pagination. 
Ex: +// +// message SomeRequest { +// Foo some_parameter = 1; +// PageRequest pagination = 2; +// } +message PageRequest { + // key is a value returned in PageResponse.next_key to begin + // querying the next page most efficiently. Only one of offset or key + // should be set. + bytes key = 1; + + // offset is a numeric offset that can be used when key is unavailable. + // It is less efficient than using key. Only one of offset or key should + // be set. + uint64 offset = 2; + + // limit is the total number of results to be returned in the result page. + // If left empty it will default to a value to be set by each app. + uint64 limit = 3; + + // count_total is set to true to indicate that the result set should include + // a count of the total number of items available for pagination in UIs. + // count_total is only respected when offset is used. It is ignored when key + // is set. + bool count_total = 4; + + // reverse is set to true if results are to be returned in the descending order. + // + // Since: cosmos-sdk 0.43 + bool reverse = 5; +} + +// PageResponse is to be embedded in gRPC response messages where the +// corresponding request message has used PageRequest. +// +// message SomeResponse { +// repeated Bar results = 1; +// PageResponse page = 2; +// } +message PageResponse { + // next_key is the key to be passed to PageRequest.key to + // query the next page most efficiently. It will be empty if + // there are no more results. + bytes next_key = 1; + + // total is total number of results available if PageRequest.count_total + // was set, its value is undefined otherwise + uint64 total = 2; +} diff --git a/packages/codegen/proto/cosmos/base/reflection/v1beta1/reflection.proto b/packages/codegen/proto/cosmos/base/reflection/v1beta1/reflection.proto new file mode 100644 index 00000000..22670e72 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/reflection/v1beta1/reflection.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; +package cosmos.base.reflection.v1beta1; + +import "google/api/annotations.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/client/grpc/reflection"; + +// ReflectionService defines a service for interface reflection. +service ReflectionService { + // ListAllInterfaces lists all the interfaces registered in the interface + // registry. + rpc ListAllInterfaces(ListAllInterfacesRequest) returns (ListAllInterfacesResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/interfaces"; + }; + + // ListImplementations list all the concrete types that implement a given + // interface. + rpc ListImplementations(ListImplementationsRequest) returns (ListImplementationsResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/interfaces/" + "{interface_name}/implementations"; + }; +} + +// ListAllInterfacesRequest is the request type of the ListAllInterfaces RPC. +message ListAllInterfacesRequest {} + +// ListAllInterfacesResponse is the response type of the ListAllInterfaces RPC. +message ListAllInterfacesResponse { + // interface_names is an array of all the registered interfaces. + repeated string interface_names = 1; +} + +// ListImplementationsRequest is the request type of the ListImplementations +// RPC. +message ListImplementationsRequest { + // interface_name defines the interface to query the implementations for. + string interface_name = 1; +} + +// ListImplementationsResponse is the response type of the ListImplementations +// RPC. 
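The PageRequest/PageResponse pair above defines cursor-style paging: send an empty key for the first page, then feed each next_key back until it comes back empty. A minimal TypeScript sketch of that loop, written against a hypothetical fetchPage callback rather than any concrete generated query method:

// One page of results, mirroring PageRequest.key / PageResponse.next_key.
interface Page<T> {
  items: T[];
  nextKey: Uint8Array; // empty when there are no further results
}

// Drains every page of a paginated query. `fetchPage` stands in for any
// query method that accepts a pagination key and returns one page.
async function fetchAll<T>(
  fetchPage: (key: Uint8Array) => Promise<Page<T>>,
): Promise<T[]> {
  const all: T[] = [];
  let key = new Uint8Array(); // empty key requests the first page
  do {
    const page = await fetchPage(key);
    all.push(...page.items);
    key = page.nextKey;
  } while (key.length > 0); // next_key is empty once the last page is reached
  return all;
}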
+message ListImplementationsResponse { + repeated string implementation_message_names = 1; +} diff --git a/packages/codegen/proto/cosmos/base/reflection/v2alpha1/reflection.proto b/packages/codegen/proto/cosmos/base/reflection/v2alpha1/reflection.proto new file mode 100644 index 00000000..d5b04855 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/reflection/v2alpha1/reflection.proto @@ -0,0 +1,218 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.base.reflection.v2alpha1; + +import "google/api/annotations.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/server/grpc/reflection/v2alpha1"; + +// AppDescriptor describes a cosmos-sdk based application +message AppDescriptor { + // AuthnDescriptor provides information on how to authenticate transactions on the application + // NOTE: experimental and subject to change in future releases. + AuthnDescriptor authn = 1; + // chain provides the chain descriptor + ChainDescriptor chain = 2; + // codec provides metadata information regarding codec related types + CodecDescriptor codec = 3; + // configuration provides metadata information regarding the sdk.Config type + ConfigurationDescriptor configuration = 4; + // query_services provides metadata information regarding the available queriable endpoints + QueryServicesDescriptor query_services = 5; + // tx provides metadata information regarding how to send transactions to the given application + TxDescriptor tx = 6; +} + +// TxDescriptor describes the accepted transaction type +message TxDescriptor { + // fullname is the protobuf fullname of the raw transaction type (for instance the tx.Tx type) + // it is not meant to support polymorphism of transaction types, it is supposed to be used by + // reflection clients to understand if they can handle a specific transaction type in an application. 
+ string fullname = 1; + // msgs lists the accepted application messages (sdk.Msg) + repeated MsgDescriptor msgs = 2; +} + +// AuthnDescriptor provides information on how to sign transactions without relying +// on the online RPCs GetTxMetadata and CombineUnsignedTxAndSignatures +message AuthnDescriptor { + // sign_modes defines the supported signature algorithm + repeated SigningModeDescriptor sign_modes = 1; +} + +// SigningModeDescriptor provides information on a signing flow of the application +// NOTE(fdymylja): here we could go as far as providing an entire flow on how +// to sign a message given a SigningModeDescriptor, but it's better to think about +// this another time +message SigningModeDescriptor { + // name defines the unique name of the signing mode + string name = 1; + // number is the unique int32 identifier for the sign_mode enum + int32 number = 2; + // authn_info_provider_method_fullname defines the fullname of the method to call to get + // the metadata required to authenticate using the provided sign_modes + string authn_info_provider_method_fullname = 3; +} + +// ChainDescriptor describes chain information of the application +message ChainDescriptor { + // id is the chain id + string id = 1; +} + +// CodecDescriptor describes the registered interfaces and provides metadata information on the types +message CodecDescriptor { + // interfaces is a list of the registerted interfaces descriptors + repeated InterfaceDescriptor interfaces = 1; +} + +// InterfaceDescriptor describes the implementation of an interface +message InterfaceDescriptor { + // fullname is the name of the interface + string fullname = 1; + // interface_accepting_messages contains information regarding the proto messages which contain the interface as + // google.protobuf.Any field + repeated InterfaceAcceptingMessageDescriptor interface_accepting_messages = 2; + // interface_implementers is a list of the descriptors of the interface implementers + repeated InterfaceImplementerDescriptor interface_implementers = 3; +} + +// InterfaceImplementerDescriptor describes an interface implementer +message InterfaceImplementerDescriptor { + // fullname is the protobuf queryable name of the interface implementer + string fullname = 1; + // type_url defines the type URL used when marshalling the type as any + // this is required so we can provide type safe google.protobuf.Any marshalling and + // unmarshalling, making sure that we don't accept just 'any' type + // in our interface fields + string type_url = 2; +} + +// InterfaceAcceptingMessageDescriptor describes a protobuf message which contains +// an interface represented as a google.protobuf.Any +message InterfaceAcceptingMessageDescriptor { + // fullname is the protobuf fullname of the type containing the interface + string fullname = 1; + // field_descriptor_names is a list of the protobuf name (not fullname) of the field + // which contains the interface as google.protobuf.Any (the interface is the same, but + // it can be in multiple fields of the same proto message) + repeated string field_descriptor_names = 2; +} + +// ConfigurationDescriptor contains metadata information on the sdk.Config +message ConfigurationDescriptor { + // bech32_account_address_prefix is the account address prefix + string bech32_account_address_prefix = 1; +} + +// MsgDescriptor describes a cosmos-sdk message that can be delivered with a transaction +message MsgDescriptor { + // msg_type_url contains the TypeURL of a sdk.Msg. 
+ string msg_type_url = 1; +} + +// ReflectionService defines a service for application reflection. +service ReflectionService { + // GetAuthnDescriptor returns information on how to authenticate transactions in the application + // NOTE: this RPC is still experimental and might be subject to breaking changes or removal in + // future releases of the cosmos-sdk. + rpc GetAuthnDescriptor(GetAuthnDescriptorRequest) returns (GetAuthnDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/authn"; + } + // GetChainDescriptor returns the description of the chain + rpc GetChainDescriptor(GetChainDescriptorRequest) returns (GetChainDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/chain"; + }; + // GetCodecDescriptor returns the descriptor of the codec of the application + rpc GetCodecDescriptor(GetCodecDescriptorRequest) returns (GetCodecDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/codec"; + } + // GetConfigurationDescriptor returns the descriptor for the sdk.Config of the application + rpc GetConfigurationDescriptor(GetConfigurationDescriptorRequest) returns (GetConfigurationDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/configuration"; + } + // GetQueryServicesDescriptor returns the available gRPC queryable services of the application + rpc GetQueryServicesDescriptor(GetQueryServicesDescriptorRequest) returns (GetQueryServicesDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/query_services"; + } + // GetTxDescriptor returns information on the used transaction object and available msgs that can be used + rpc GetTxDescriptor(GetTxDescriptorRequest) returns (GetTxDescriptorResponse) { + option (google.api.http).get = "/cosmos/base/reflection/v1beta1/app_descriptor/tx_descriptor"; + } +} + +// GetAuthnDescriptorRequest is the request used for the GetAuthnDescriptor RPC +message GetAuthnDescriptorRequest {} +// GetAuthnDescriptorResponse is the response returned by the GetAuthnDescriptor RPC +message GetAuthnDescriptorResponse { + // authn describes how to authenticate to the application when sending transactions + AuthnDescriptor authn = 1; +} + +// GetChainDescriptorRequest is the request used for the GetChainDescriptor RPC +message GetChainDescriptorRequest {} +// GetChainDescriptorResponse is the response returned by the GetChainDescriptor RPC +message GetChainDescriptorResponse { + // chain describes application chain information + ChainDescriptor chain = 1; +} + +// GetCodecDescriptorRequest is the request used for the GetCodecDescriptor RPC +message GetCodecDescriptorRequest {} +// GetCodecDescriptorResponse is the response returned by the GetCodecDescriptor RPC +message GetCodecDescriptorResponse { + // codec describes the application codec such as registered interfaces and implementations + CodecDescriptor codec = 1; +} + +// GetConfigurationDescriptorRequest is the request used for the GetConfigurationDescriptor RPC +message GetConfigurationDescriptorRequest {} +// GetConfigurationDescriptorResponse is the response returned by the GetConfigurationDescriptor RPC +message GetConfigurationDescriptorResponse { + // config describes the application's sdk.Config + ConfigurationDescriptor config = 1; +} + +// GetQueryServicesDescriptorRequest is the request used for the GetQueryServicesDescriptor RPC +message 
GetQueryServicesDescriptorRequest {} +// GetQueryServicesDescriptorResponse is the response returned by the GetQueryServicesDescriptor RPC +message GetQueryServicesDescriptorResponse { + // queries provides information on the available queryable services + QueryServicesDescriptor queries = 1; +} + +// GetTxDescriptorRequest is the request used for the GetTxDescriptor RPC +message GetTxDescriptorRequest {} +// GetTxDescriptorResponse is the response returned by the GetTxDescriptor RPC +message GetTxDescriptorResponse { + // tx provides information on msgs that can be forwarded to the application + // alongside the accepted transaction protobuf type + TxDescriptor tx = 1; +} + +// QueryServicesDescriptor contains the list of cosmos-sdk queriable services +message QueryServicesDescriptor { + // query_services is a list of cosmos-sdk QueryServiceDescriptor + repeated QueryServiceDescriptor query_services = 1; +} + +// QueryServiceDescriptor describes a cosmos-sdk queryable service +message QueryServiceDescriptor { + // fullname is the protobuf fullname of the service descriptor + string fullname = 1; + // is_module describes if this service is actually exposed by an application's module + bool is_module = 2; + // methods provides a list of query service methods + repeated QueryMethodDescriptor methods = 3; +} + +// QueryMethodDescriptor describes a queryable method of a query service +// no other info is provided beside method name and tendermint queryable path +// because it would be redundant with the grpc reflection service +message QueryMethodDescriptor { + // name is the protobuf name (not fullname) of the method + string name = 1; + // full_query_path is the path that can be used to query + // this method via tendermint abci.Query + string full_query_path = 2; +} diff --git a/packages/codegen/proto/cosmos/base/snapshots/v1beta1/snapshot.proto b/packages/codegen/proto/cosmos/base/snapshots/v1beta1/snapshot.proto new file mode 100644 index 00000000..a89e0b4c --- /dev/null +++ b/packages/codegen/proto/cosmos/base/snapshots/v1beta1/snapshot.proto @@ -0,0 +1,70 @@ +syntax = "proto3"; +package cosmos.base.snapshots.v1beta1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/snapshots/types"; + +// Snapshot contains Tendermint state sync snapshot info. +message Snapshot { + uint64 height = 1; + uint32 format = 2; + uint32 chunks = 3; + bytes hash = 4; + Metadata metadata = 5 [(gogoproto.nullable) = false]; +} + +// Metadata contains SDK-specific snapshot metadata. +message Metadata { + repeated bytes chunk_hashes = 1; // SHA-256 chunk hashes +} + +// SnapshotItem is an item contained in a rootmulti.Store snapshot. +message SnapshotItem { + // item is the specific type of snapshot item. + oneof item { + SnapshotStoreItem store = 1; + SnapshotIAVLItem iavl = 2 [(gogoproto.customname) = "IAVL"]; + SnapshotExtensionMeta extension = 3; + SnapshotExtensionPayload extension_payload = 4; + SnapshotKVItem kv = 5 [(gogoproto.customname) = "KV"]; + SnapshotSchema schema = 6; + } +} + +// SnapshotStoreItem contains metadata about a snapshotted store. +message SnapshotStoreItem { + string name = 1; +} + +// SnapshotIAVLItem is an exported IAVL node. +message SnapshotIAVLItem { + bytes key = 1; + bytes value = 2; + // version is block height + int64 version = 3; + // height is depth of the tree. + int32 height = 4; +} + +// SnapshotExtensionMeta contains metadata about an external snapshotter. 
+message SnapshotExtensionMeta { + string name = 1; + uint32 format = 2; +} + +// SnapshotExtensionPayload contains payloads of an external snapshotter. +message SnapshotExtensionPayload { + bytes payload = 1; +} + +// SnapshotKVItem is an exported Key/Value Pair +message SnapshotKVItem { + bytes key = 1; + bytes value = 2; +} + +// SnapshotSchema is an exported schema of smt store +message SnapshotSchema{ + repeated bytes keys = 1; +} diff --git a/packages/codegen/proto/cosmos/base/store/v1beta1/commit_info.proto b/packages/codegen/proto/cosmos/base/store/v1beta1/commit_info.proto new file mode 100644 index 00000000..98a33d30 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/store/v1beta1/commit_info.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; +package cosmos.base.store.v1beta1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/store/types"; + +// CommitInfo defines commit information used by the multi-store when committing +// a version/height. +message CommitInfo { + int64 version = 1; + repeated StoreInfo store_infos = 2 [(gogoproto.nullable) = false]; +} + +// StoreInfo defines store-specific commit information. It contains a reference +// between a store name and the commit ID. +message StoreInfo { + string name = 1; + CommitID commit_id = 2 [(gogoproto.nullable) = false]; +} + +// CommitID defines the committment information when a specific store is +// committed. +message CommitID { + option (gogoproto.goproto_stringer) = false; + + int64 version = 1; + bytes hash = 2; +} diff --git a/packages/codegen/proto/cosmos/base/store/v1beta1/listening.proto b/packages/codegen/proto/cosmos/base/store/v1beta1/listening.proto new file mode 100644 index 00000000..35999710 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/store/v1beta1/listening.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; +package cosmos.base.store.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/store/types"; + +// StoreKVPair is a KVStore KVPair used for listening to state changes (Sets and Deletes) +// It optionally includes the StoreKey for the originating KVStore and a Boolean flag to distinguish between Sets and +// Deletes +// +// Since: cosmos-sdk 0.43 +message StoreKVPair { + string store_key = 1; // the store key for the KVStore this pair originates from + bool delete = 2; // true indicates a delete operation, false indicates a set operation + bytes key = 3; + bytes value = 4; +} diff --git a/packages/codegen/proto/cosmos/base/tendermint/v1beta1/query.proto b/packages/codegen/proto/cosmos/base/tendermint/v1beta1/query.proto new file mode 100644 index 00000000..96a46e53 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/tendermint/v1beta1/query.proto @@ -0,0 +1,138 @@ +syntax = "proto3"; +package cosmos.base.tendermint.v1beta1; + +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; +import "tendermint/p2p/types.proto"; +import "tendermint/types/block.proto"; +import "tendermint/types/types.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/client/grpc/tmservice"; + +// Service defines the gRPC querier service for tendermint queries. +service Service { + // GetNodeInfo queries the current node info. + rpc GetNodeInfo(GetNodeInfoRequest) returns (GetNodeInfoResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/node_info"; + } + // GetSyncing queries node syncing. 
+ rpc GetSyncing(GetSyncingRequest) returns (GetSyncingResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/syncing"; + } + // GetLatestBlock returns the latest block. + rpc GetLatestBlock(GetLatestBlockRequest) returns (GetLatestBlockResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/blocks/latest"; + } + // GetBlockByHeight queries block for given height. + rpc GetBlockByHeight(GetBlockByHeightRequest) returns (GetBlockByHeightResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/blocks/{height}"; + } + + // GetLatestValidatorSet queries latest validator-set. + rpc GetLatestValidatorSet(GetLatestValidatorSetRequest) returns (GetLatestValidatorSetResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/validatorsets/latest"; + } + // GetValidatorSetByHeight queries validator-set at a given height. + rpc GetValidatorSetByHeight(GetValidatorSetByHeightRequest) returns (GetValidatorSetByHeightResponse) { + option (google.api.http).get = "/cosmos/base/tendermint/v1beta1/validatorsets/{height}"; + } +} + +// GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. +message GetValidatorSetByHeightRequest { + int64 height = 1; + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. +message GetValidatorSetByHeightResponse { + int64 block_height = 1; + repeated Validator validators = 2; + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 3; +} + +// GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. +message GetLatestValidatorSetRequest { + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. +message GetLatestValidatorSetResponse { + int64 block_height = 1; + repeated Validator validators = 2; + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 3; +} + +// Validator is the type for the validator-set. +message Validator { + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + google.protobuf.Any pub_key = 2; + int64 voting_power = 3; + int64 proposer_priority = 4; +} + +// GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. +message GetBlockByHeightRequest { + int64 height = 1; +} + +// GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. +message GetBlockByHeightResponse { + .tendermint.types.BlockID block_id = 1; + .tendermint.types.Block block = 2; +} + +// GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. +message GetLatestBlockRequest {} + +// GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. +message GetLatestBlockResponse { + .tendermint.types.BlockID block_id = 1; + .tendermint.types.Block block = 2; +} + +// GetSyncingRequest is the request type for the Query/GetSyncing RPC method. +message GetSyncingRequest {} + +// GetSyncingResponse is the response type for the Query/GetSyncing RPC method. 
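Every rpc in the tendermint query Service above also carries a google.api.http GET path, so a node that exposes the REST gateway answers these queries over plain HTTP as well. A hedged sketch using fetch; NODE is a placeholder endpoint and only the fields actually read are typed:

const NODE = "http://localhost:1317"; // placeholder REST gateway address, adjust for your node

// GetSyncing: is the node still catching up?
async function isSyncing(): Promise<boolean> {
  const res = await fetch(`${NODE}/cosmos/base/tendermint/v1beta1/syncing`);
  const body = (await res.json()) as { syncing: boolean };
  return body.syncing;
}

// GetLatestBlock: read just the height out of the latest block.
async function latestHeight(): Promise<string> {
  const res = await fetch(`${NODE}/cosmos/base/tendermint/v1beta1/blocks/latest`);
  // The full Block message is much larger; only header.height is typed here.
  const body = (await res.json()) as { block: { header: { height: string } } };
  return body.block.header.height;
}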
+message GetSyncingResponse { + bool syncing = 1; +} + +// GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. +message GetNodeInfoRequest {} + +// GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. +message GetNodeInfoResponse { + .tendermint.p2p.NodeInfo node_info = 1; + VersionInfo application_version = 2; +} + +// VersionInfo is the type for the GetNodeInfoResponse message. +message VersionInfo { + string name = 1; + string app_name = 2; + string version = 3; + string git_commit = 4; + string build_tags = 5; + string go_version = 6; + repeated Module build_deps = 7; + // Since: cosmos-sdk 0.43 + string cosmos_sdk_version = 8; +} + +// Module is the type for VersionInfo +message Module { + // module path + string path = 1; + // module version + string version = 2; + // checksum + string sum = 3; +} diff --git a/packages/codegen/proto/cosmos/base/v1beta1/coin.proto b/packages/codegen/proto/cosmos/base/v1beta1/coin.proto new file mode 100644 index 00000000..69e67e09 --- /dev/null +++ b/packages/codegen/proto/cosmos/base/v1beta1/coin.proto @@ -0,0 +1,43 @@ +syntax = "proto3"; +package cosmos.base.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types"; +option (gogoproto.goproto_stringer_all) = false; +option (gogoproto.stringer_all) = false; + +// Coin defines a token with a denomination and an amount. +// +// NOTE: The amount field is an Int which implements the custom method +// signatures required by gogoproto. +message Coin { + option (gogoproto.equal) = true; + + string denom = 1; + string amount = 2 + [(cosmos_proto.scalar) = "cosmos.Int", (gogoproto.customtype) = "Int", (gogoproto.nullable) = false]; +} + +// DecCoin defines a token with a denomination and a decimal amount. +// +// NOTE: The amount field is an Dec which implements the custom method +// signatures required by gogoproto. +message DecCoin { + option (gogoproto.equal) = true; + + string denom = 1; + string amount = 2 + [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.customtype) = "Dec", (gogoproto.nullable) = false]; +} + +// IntProto defines a Protobuf wrapper around an Int object. +message IntProto { + string int = 1 [(cosmos_proto.scalar) = "cosmos.Int", (gogoproto.customtype) = "Int", (gogoproto.nullable) = false]; +} + +// DecProto defines a Protobuf wrapper around a Dec object. +message DecProto { + string dec = 1 [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.customtype) = "Dec", (gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/capability/v1beta1/capability.proto b/packages/codegen/proto/cosmos/capability/v1beta1/capability.proto new file mode 100644 index 00000000..c433566d --- /dev/null +++ b/packages/codegen/proto/cosmos/capability/v1beta1/capability.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +package cosmos.capability.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/capability/types"; + +import "gogoproto/gogo.proto"; + +// Capability defines an implementation of an object capability. The index +// provided to a Capability must be globally unique. +message Capability { + option (gogoproto.goproto_stringer) = false; + + uint64 index = 1; +} + +// Owner defines a single capability owner. An owner is defined by the name of +// capability and the module name. 
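Since Coin.amount travels as a string backed by the SDK's custom Int type, client code normally parses it into a big integer before doing arithmetic. A small illustrative helper, not part of the generated output:

interface Coin {
  denom: string;
  amount: string; // integer amount encoded as a decimal string
}

// Adds two coins of the same denomination without precision loss.
function addCoins(a: Coin, b: Coin): Coin {
  if (a.denom !== b.denom) {
    throw new Error(`cannot add ${a.denom} to ${b.denom}`);
  }
  return {
    denom: a.denom,
    amount: (BigInt(a.amount) + BigInt(b.amount)).toString(),
  };
}

// addCoins({ denom: "uatom", amount: "250" }, { denom: "uatom", amount: "750" })
// => { denom: "uatom", amount: "1000" }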
+message Owner { + option (gogoproto.goproto_stringer) = false; + option (gogoproto.goproto_getters) = false; + + string module = 1; + string name = 2; +} + +// CapabilityOwners defines a set of owners of a single Capability. The set of +// owners must be unique. +message CapabilityOwners { + repeated Owner owners = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/capability/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/capability/v1beta1/genesis.proto new file mode 100644 index 00000000..b5482439 --- /dev/null +++ b/packages/codegen/proto/cosmos/capability/v1beta1/genesis.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package cosmos.capability.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/capability/v1beta1/capability.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/capability/types"; + +// GenesisOwners defines the capability owners with their corresponding index. +message GenesisOwners { + // index is the index of the capability owner. + uint64 index = 1; + + // index_owners are the owners at the given index. + CapabilityOwners index_owners = 2 [(gogoproto.nullable) = false]; +} + +// GenesisState defines the capability module's genesis state. +message GenesisState { + // index is the capability global index. + uint64 index = 1; + + // owners represents a map from index to owners of the capability index + // index key is string to allow amino marshalling. + repeated GenesisOwners owners = 2 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/crisis/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/crisis/v1beta1/genesis.proto new file mode 100644 index 00000000..5c291604 --- /dev/null +++ b/packages/codegen/proto/cosmos/crisis/v1beta1/genesis.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +package cosmos.crisis.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/crisis/types"; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +// GenesisState defines the crisis module's genesis state. +message GenesisState { + // constant_fee is the fee used to verify the invariant in the crisis + // module. + cosmos.base.v1beta1.Coin constant_fee = 3 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/crisis/v1beta1/tx.proto b/packages/codegen/proto/cosmos/crisis/v1beta1/tx.proto new file mode 100644 index 00000000..fea9059f --- /dev/null +++ b/packages/codegen/proto/cosmos/crisis/v1beta1/tx.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; +package cosmos.crisis.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/crisis/types"; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +// Msg defines the bank Msg service. +service Msg { + // VerifyInvariant defines a method to verify a particular invariance. + rpc VerifyInvariant(MsgVerifyInvariant) returns (MsgVerifyInvariantResponse); +} + +// MsgVerifyInvariant represents a message to verify a particular invariance. +message MsgVerifyInvariant { + option (cosmos.msg.v1.signer) = "sender"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string sender = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string invariant_module_name = 2; + string invariant_route = 3; +} + +// MsgVerifyInvariantResponse defines the Msg/VerifyInvariant response type. 
+message MsgVerifyInvariantResponse {} diff --git a/packages/codegen/proto/cosmos/crypto/ed25519/keys.proto b/packages/codegen/proto/cosmos/crypto/ed25519/keys.proto new file mode 100644 index 00000000..6ffec344 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/ed25519/keys.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package cosmos.crypto.ed25519; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"; + +// PubKey is an ed25519 public key for handling Tendermint keys in SDK. +// It's needed for Any serialization and SDK compatibility. +// It must not be used in a non Tendermint key context because it doesn't implement +// ADR-28. Nevertheless, you will like to use ed25519 in app user level +// then you must create a new proto message and follow ADR-28 for Address construction. +message PubKey { + option (gogoproto.goproto_stringer) = false; + + bytes key = 1 [(gogoproto.casttype) = "crypto/ed25519.PublicKey"]; +} + +// Deprecated: PrivKey defines a ed25519 private key. +// NOTE: ed25519 keys must not be used in SDK apps except in a tendermint validator context. +message PrivKey { + bytes key = 1 [(gogoproto.casttype) = "crypto/ed25519.PrivateKey"]; +} diff --git a/packages/codegen/proto/cosmos/crypto/hd/v1/hd.proto b/packages/codegen/proto/cosmos/crypto/hd/v1/hd.proto new file mode 100644 index 00000000..e4a95afc --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/hd/v1/hd.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package cosmos.crypto.hd.v1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/hd"; +option (gogoproto.goproto_getters_all) = false; + +// BIP44Params is used as path field in ledger item in Record. +message BIP44Params { + option (gogoproto.goproto_stringer) = false; + // purpose is a constant set to 44' (or 0x8000002C) following the BIP43 recommendation + uint32 purpose = 1; + // coin_type is a constant that improves privacy + uint32 coin_type = 2; + // account splits the key space into independent user identities + uint32 account = 3; + // change is a constant used for public derivation. Constant 0 is used for external chain and constant 1 for internal + // chain. + bool change = 4; + // address_index is used as child index in BIP32 derivation + uint32 address_index = 5; +} diff --git a/packages/codegen/proto/cosmos/crypto/keyring/v1/record.proto b/packages/codegen/proto/cosmos/crypto/keyring/v1/record.proto new file mode 100644 index 00000000..9b2d3c96 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/keyring/v1/record.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; +package cosmos.crypto.keyring.v1; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "cosmos/crypto/hd/v1/hd.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/keyring"; +option (gogoproto.goproto_getters_all) = false; + +// Record is used for representing a key in the keyring. +message Record { + // name represents a name of Record + string name = 1; + // pub_key represents a public key in any format + google.protobuf.Any pub_key = 2; + + // Record contains one of the following items + oneof item { + // local stores the public information about a locally stored key + Local local = 3; + // ledger stores the public information about a Ledger key + Ledger ledger = 4; + // Multi does not store any information. + Multi multi = 5; + // Offline does not store any information. + Offline offline = 6; + } + + // Item is a keyring item stored in a keyring backend. 
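BIP44Params above maps directly onto the textual derivation path m / purpose' / coin_type' / account' / change / address_index, with the first three components hardened. A short rendering sketch follows; the 118 coin type in the usage comment is only the conventional Cosmos SLIP-44 value, not something this file fixes:

interface BIP44Params {
  purpose: number;      // hardened; 44 per the BIP43 recommendation noted above
  coinType: number;     // hardened
  account: number;      // hardened
  change: boolean;      // false = external chain (0), true = internal chain (1)
  addressIndex: number; // child index in BIP32 derivation
}

// Renders the params in the familiar "m/44'/118'/0'/0/0" form.
function bip44Path(p: BIP44Params): string {
  return `m/${p.purpose}'/${p.coinType}'/${p.account}'/${p.change ? 1 : 0}/${p.addressIndex}`;
}

// bip44Path({ purpose: 44, coinType: 118, account: 0, change: false, addressIndex: 0 })
// => "m/44'/118'/0'/0/0"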
+ // Local item + message Local { + google.protobuf.Any priv_key = 1; + string priv_key_type = 2; + } + + // Ledger item + message Ledger { + hd.v1.BIP44Params path = 1; + } + + // Multi item + message Multi {} + + // Offline item + message Offline {} +} diff --git a/packages/codegen/proto/cosmos/crypto/multisig/keys.proto b/packages/codegen/proto/cosmos/crypto/multisig/keys.proto new file mode 100644 index 00000000..7a11fe33 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/multisig/keys.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; +package cosmos.crypto.multisig; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/keys/multisig"; + +// LegacyAminoPubKey specifies a public key type +// which nests multiple public keys and a threshold, +// it uses legacy amino address rules. +message LegacyAminoPubKey { + option (gogoproto.goproto_getters) = false; + + uint32 threshold = 1; + repeated google.protobuf.Any public_keys = 2 [(gogoproto.customname) = "PubKeys"]; +} diff --git a/packages/codegen/proto/cosmos/crypto/multisig/v1beta1/multisig.proto b/packages/codegen/proto/cosmos/crypto/multisig/v1beta1/multisig.proto new file mode 100644 index 00000000..bf671f17 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/multisig/v1beta1/multisig.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package cosmos.crypto.multisig.v1beta1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/types"; + +// MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. +// See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers +// signed and with which modes. +message MultiSignature { + option (gogoproto.goproto_unrecognized) = true; + repeated bytes signatures = 1; +} + +// CompactBitArray is an implementation of a space efficient bit array. +// This is used to ensure that the encoded data takes up a minimal amount of +// space after proto encoding. +// This is not thread safe, and is not intended for concurrent usage. +message CompactBitArray { + option (gogoproto.goproto_stringer) = false; + + uint32 extra_bits_stored = 1; + bytes elems = 2; +} diff --git a/packages/codegen/proto/cosmos/crypto/secp256k1/keys.proto b/packages/codegen/proto/cosmos/crypto/secp256k1/keys.proto new file mode 100644 index 00000000..a2272571 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/secp256k1/keys.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package cosmos.crypto.secp256k1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"; + +// PubKey defines a secp256k1 public key +// Key is the compressed form of the pubkey. The first byte depends is a 0x02 byte +// if the y-coordinate is the lexicographically largest of the two associated with +// the x-coordinate. Otherwise the first byte is a 0x03. +// This prefix is followed with the x-coordinate. +message PubKey { + option (gogoproto.goproto_stringer) = false; + + bytes key = 1; +} + +// PrivKey defines a secp256k1 private key. 
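The comment on secp256k1.PubKey above describes the standard 33-byte compressed encoding: a one-byte 0x02/0x03 prefix selecting which of the two candidate y-coordinates is meant, followed by the 32-byte x-coordinate. A tiny shape check along those lines (illustrative, not part of this package):

// True if `key` has the shape of a compressed secp256k1 public key:
// one prefix byte (0x02 or 0x03) plus a 32-byte x-coordinate.
// This only checks the encoding, not that the point lies on the curve.
function looksLikeCompressedSecp256k1(key: Uint8Array): boolean {
  return key.length === 33 && (key[0] === 0x02 || key[0] === 0x03);
}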
+message PrivKey { + bytes key = 1; +} diff --git a/packages/codegen/proto/cosmos/crypto/secp256r1/keys.proto b/packages/codegen/proto/cosmos/crypto/secp256r1/keys.proto new file mode 100644 index 00000000..2e96c6e3 --- /dev/null +++ b/packages/codegen/proto/cosmos/crypto/secp256r1/keys.proto @@ -0,0 +1,23 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.crypto.secp256r1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/crypto/keys/secp256r1"; +option (gogoproto.messagename_all) = true; +option (gogoproto.goproto_stringer_all) = false; +option (gogoproto.goproto_getters_all) = false; + +// PubKey defines a secp256r1 ECDSA public key. +message PubKey { + // Point on secp256r1 curve in a compressed representation as specified in section + // 4.3.6 of ANSI X9.62: https://webstore.ansi.org/standards/ascx9/ansix9621998 + bytes key = 1 [(gogoproto.customtype) = "ecdsaPK"]; +} + +// PrivKey defines a secp256r1 ECDSA private key. +message PrivKey { + // secret number serialized using big-endian encoding + bytes secret = 1 [(gogoproto.customtype) = "ecdsaSK"]; +} diff --git a/packages/codegen/proto/cosmos/distribution/v1beta1/distribution.proto b/packages/codegen/proto/cosmos/distribution/v1beta1/distribution.proto new file mode 100644 index 00000000..1afe25ae --- /dev/null +++ b/packages/codegen/proto/cosmos/distribution/v1beta1/distribution.proto @@ -0,0 +1,154 @@ +syntax = "proto3"; +package cosmos.distribution.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/distribution/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos_proto/cosmos.proto"; + +// Params defines the set of params for the distribution module. +message Params { + option (gogoproto.goproto_stringer) = false; + string community_tax = 1 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + string base_proposer_reward = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + string bonus_proposer_reward = 3 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + bool withdraw_addr_enabled = 4; +} + +// ValidatorHistoricalRewards represents historical rewards for a validator. +// Height is implicit within the store key. +// Cumulative reward ratio is the sum from the zeroeth period +// until this period of rewards / tokens, per the spec. +// The reference count indicates the number of objects +// which might need to reference this historical entry at any point. +// ReferenceCount = +// number of outstanding delegations which ended the associated period (and +// might need to read that record) +// + number of slashes which ended the associated period (and might need to +// read that record) +// + one per validator for the zeroeth period, set on initialization +message ValidatorHistoricalRewards { + repeated cosmos.base.v1beta1.DecCoin cumulative_reward_ratio = 1 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; + uint32 reference_count = 2; +} + +// ValidatorCurrentRewards represents current rewards and current +// period for a validator kept as a running counter and incremented +// each block as long as the validator's tokens remain constant. 
+message ValidatorCurrentRewards { + repeated cosmos.base.v1beta1.DecCoin rewards = 1 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; + uint64 period = 2; +} + +// ValidatorAccumulatedCommission represents accumulated commission +// for a validator kept as a running counter, can be withdrawn at any time. +message ValidatorAccumulatedCommission { + repeated cosmos.base.v1beta1.DecCoin commission = 1 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; +} + +// ValidatorOutstandingRewards represents outstanding (un-withdrawn) rewards +// for a validator inexpensive to track, allows simple sanity checks. +message ValidatorOutstandingRewards { + repeated cosmos.base.v1beta1.DecCoin rewards = 1 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; +} + +// ValidatorSlashEvent represents a validator slash event. +// Height is implicit within the store key. +// This is needed to calculate appropriate amount of staking tokens +// for delegations which are withdrawn after a slash has occurred. +message ValidatorSlashEvent { + uint64 validator_period = 1; + string fraction = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// ValidatorSlashEvents is a collection of ValidatorSlashEvent messages. +message ValidatorSlashEvents { + option (gogoproto.goproto_stringer) = false; + repeated ValidatorSlashEvent validator_slash_events = 1 [(gogoproto.nullable) = false]; +} + +// FeePool is the global fee pool for distribution. +message FeePool { + repeated cosmos.base.v1beta1.DecCoin community_pool = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins"]; +} + +// CommunityPoolSpendProposal details a proposal for use of community funds, +// together with how many coins are proposed to be spent, and to which +// recipient account. +message CommunityPoolSpendProposal { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; + string recipient = 3; + repeated cosmos.base.v1beta1.Coin amount = 4 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// DelegatorStartingInfo represents the starting info for a delegator reward +// period. It tracks the previous validator period, the delegation's amount of +// staking token, and the creation height (to check later on if any slashes have +// occurred). NOTE: Even though validators are slashed to whole staking tokens, +// the delegators within the validator may be left with less than a full token, +// thus sdk.Dec is used. +message DelegatorStartingInfo { + uint64 previous_period = 1; + string stake = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + uint64 height = 3 [(gogoproto.jsontag) = "creation_height"]; +} + +// DelegationDelegatorReward represents the properties +// of a delegator's delegation reward. 
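ValidatorHistoricalRewards, ValidatorCurrentRewards and DelegatorStartingInfo together drive the SDK's F1 fee distribution: ignoring intermediate slashes (which the ValidatorSlashEvent records exist to correct for), a delegation's accrued reward between two periods is roughly its stake multiplied by the change in cumulative reward ratio. A toy numeric sketch of that difference, using plain numbers in place of the Dec strings the protos actually carry:

// Toy F1 reward difference for a single reward denom. `crrStart` and `crrEnd`
// stand in for cumulative_reward_ratio entries at the delegation's starting
// and ending periods; slashing adjustments are deliberately ignored here.
function pendingReward(stake: number, crrStart: number, crrEnd: number): number {
  return stake * (crrEnd - crrStart);
}

// A 1_000-token delegation while the ratio moves from 0.10 to 0.25 has accrued
// pendingReward(1_000, 0.10, 0.25), i.e. 150 reward tokens.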
+message DelegationDelegatorReward { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = true; + + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + repeated cosmos.base.v1beta1.DecCoin reward = 2 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; +} + +// CommunityPoolSpendProposalWithDeposit defines a CommunityPoolSpendProposal +// with a deposit +message CommunityPoolSpendProposalWithDeposit { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = true; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; + string recipient = 3; + string amount = 4; + string deposit = 5; +} diff --git a/packages/codegen/proto/cosmos/distribution/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/distribution/v1beta1/genesis.proto new file mode 100644 index 00000000..4662e8df --- /dev/null +++ b/packages/codegen/proto/cosmos/distribution/v1beta1/genesis.proto @@ -0,0 +1,144 @@ +syntax = "proto3"; +package cosmos.distribution.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/distribution/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/distribution/v1beta1/distribution.proto"; +import "cosmos_proto/cosmos.proto"; + +// DelegatorWithdrawInfo is the address for where distributions rewards are +// withdrawn to by default this struct is only used at genesis to feed in +// default withdraw addresses. +message DelegatorWithdrawInfo { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_address is the address of the delegator. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // withdraw_address is the address to withdraw the delegation rewards to. + string withdraw_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// ValidatorOutstandingRewardsRecord is used for import/export via genesis json. +message ValidatorOutstandingRewardsRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validator_address is the address of the validator. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // outstanding_rewards represents the oustanding rewards of a validator. + repeated cosmos.base.v1beta1.DecCoin outstanding_rewards = 2 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; +} + +// ValidatorAccumulatedCommissionRecord is used for import / export via genesis +// json. +message ValidatorAccumulatedCommissionRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validator_address is the address of the validator. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // accumulated is the accumulated commission of a validator. + ValidatorAccumulatedCommission accumulated = 2 [(gogoproto.nullable) = false]; +} + +// ValidatorHistoricalRewardsRecord is used for import / export via genesis +// json. +message ValidatorHistoricalRewardsRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validator_address is the address of the validator. 
+ string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // period defines the period the historical rewards apply to. + uint64 period = 2; + + // rewards defines the historical rewards of a validator. + ValidatorHistoricalRewards rewards = 3 [(gogoproto.nullable) = false]; +} + +// ValidatorCurrentRewardsRecord is used for import / export via genesis json. +message ValidatorCurrentRewardsRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validator_address is the address of the validator. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // rewards defines the current rewards of a validator. + ValidatorCurrentRewards rewards = 2 [(gogoproto.nullable) = false]; +} + +// DelegatorStartingInfoRecord used for import / export via genesis json. +message DelegatorStartingInfoRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_address is the address of the delegator. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // validator_address is the address of the validator. + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // starting_info defines the starting info of a delegator. + DelegatorStartingInfo starting_info = 3 [(gogoproto.nullable) = false]; +} + +// ValidatorSlashEventRecord is used for import / export via genesis json. +message ValidatorSlashEventRecord { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validator_address is the address of the validator. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // height defines the block height at which the slash event occured. + uint64 height = 2; + // period is the period of the slash event. + uint64 period = 3; + // validator_slash_event describes the slash event. + ValidatorSlashEvent validator_slash_event = 4 [(gogoproto.nullable) = false]; +} + +// GenesisState defines the distribution module's genesis state. +message GenesisState { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // params defines all the paramaters of the module. + Params params = 1 [(gogoproto.nullable) = false]; + + // fee_pool defines the fee pool at genesis. + FeePool fee_pool = 2 [(gogoproto.nullable) = false]; + + // fee_pool defines the delegator withdraw infos at genesis. + repeated DelegatorWithdrawInfo delegator_withdraw_infos = 3 [(gogoproto.nullable) = false]; + + // fee_pool defines the previous proposer at genesis. + string previous_proposer = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // fee_pool defines the outstanding rewards of all validators at genesis. + repeated ValidatorOutstandingRewardsRecord outstanding_rewards = 5 [(gogoproto.nullable) = false]; + + // fee_pool defines the accumulated commisions of all validators at genesis. + repeated ValidatorAccumulatedCommissionRecord validator_accumulated_commissions = 6 [(gogoproto.nullable) = false]; + + // fee_pool defines the historical rewards of all validators at genesis. + repeated ValidatorHistoricalRewardsRecord validator_historical_rewards = 7 [(gogoproto.nullable) = false]; + + // fee_pool defines the current rewards of all validators at genesis. + repeated ValidatorCurrentRewardsRecord validator_current_rewards = 8 [(gogoproto.nullable) = false]; + + // fee_pool defines the delegator starting infos at genesis. 
+ repeated DelegatorStartingInfoRecord delegator_starting_infos = 9 [(gogoproto.nullable) = false]; + + // fee_pool defines the validator slash events at genesis. + repeated ValidatorSlashEventRecord validator_slash_events = 10 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/distribution/v1beta1/query.proto b/packages/codegen/proto/cosmos/distribution/v1beta1/query.proto new file mode 100644 index 00000000..a09413fc --- /dev/null +++ b/packages/codegen/proto/cosmos/distribution/v1beta1/query.proto @@ -0,0 +1,219 @@ +syntax = "proto3"; +package cosmos.distribution.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/distribution/v1beta1/distribution.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/distribution/types"; + +// Query defines the gRPC querier service for distribution module. +service Query { + // Params queries params of the distribution module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/params"; + } + + // ValidatorOutstandingRewards queries rewards of a validator address. + rpc ValidatorOutstandingRewards(QueryValidatorOutstandingRewardsRequest) + returns (QueryValidatorOutstandingRewardsResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/validators/" + "{validator_address}/outstanding_rewards"; + } + + // ValidatorCommission queries accumulated commission for a validator. + rpc ValidatorCommission(QueryValidatorCommissionRequest) returns (QueryValidatorCommissionResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/validators/" + "{validator_address}/commission"; + } + + // ValidatorSlashes queries slash events of a validator. + rpc ValidatorSlashes(QueryValidatorSlashesRequest) returns (QueryValidatorSlashesResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/validators/{validator_address}/slashes"; + } + + // DelegationRewards queries the total rewards accrued by a delegation. + rpc DelegationRewards(QueryDelegationRewardsRequest) returns (QueryDelegationRewardsResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/delegators/{delegator_address}/rewards/" + "{validator_address}"; + } + + // DelegationTotalRewards queries the total rewards accrued by a each + // validator. + rpc DelegationTotalRewards(QueryDelegationTotalRewardsRequest) returns (QueryDelegationTotalRewardsResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/delegators/{delegator_address}/rewards"; + } + + // DelegatorValidators queries the validators of a delegator. + rpc DelegatorValidators(QueryDelegatorValidatorsRequest) returns (QueryDelegatorValidatorsResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/delegators/" + "{delegator_address}/validators"; + } + + // DelegatorWithdrawAddress queries withdraw address of a delegator. + rpc DelegatorWithdrawAddress(QueryDelegatorWithdrawAddressRequest) returns (QueryDelegatorWithdrawAddressResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/delegators/" + "{delegator_address}/withdraw_address"; + } + + // CommunityPool queries the community pool coins. 
+ rpc CommunityPool(QueryCommunityPoolRequest) returns (QueryCommunityPoolResponse) { + option (google.api.http).get = "/cosmos/distribution/v1beta1/community_pool"; + } +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryValidatorOutstandingRewardsRequest is the request type for the +// Query/ValidatorOutstandingRewards RPC method. +message QueryValidatorOutstandingRewardsRequest { + // validator_address defines the validator address to query for. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryValidatorOutstandingRewardsResponse is the response type for the +// Query/ValidatorOutstandingRewards RPC method. +message QueryValidatorOutstandingRewardsResponse { + ValidatorOutstandingRewards rewards = 1 [(gogoproto.nullable) = false]; +} + +// QueryValidatorCommissionRequest is the request type for the +// Query/ValidatorCommission RPC method +message QueryValidatorCommissionRequest { + // validator_address defines the validator address to query for. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryValidatorCommissionResponse is the response type for the +// Query/ValidatorCommission RPC method +message QueryValidatorCommissionResponse { + // commission defines the commision the validator received. + ValidatorAccumulatedCommission commission = 1 [(gogoproto.nullable) = false]; +} + +// QueryValidatorSlashesRequest is the request type for the +// Query/ValidatorSlashes RPC method +message QueryValidatorSlashesRequest { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = true; + + // validator_address defines the validator address to query for. + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // starting_height defines the optional starting height to query the slashes. + uint64 starting_height = 2; + // starting_height defines the optional ending height to query the slashes. + uint64 ending_height = 3; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 4; +} + +// QueryValidatorSlashesResponse is the response type for the +// Query/ValidatorSlashes RPC method. +message QueryValidatorSlashesResponse { + // slashes defines the slashes the validator received. + repeated ValidatorSlashEvent slashes = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDelegationRewardsRequest is the request type for the +// Query/DelegationRewards RPC method. +message QueryDelegationRewardsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_address defines the delegator address to query for. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_address defines the validator address to query for. + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegationRewardsResponse is the response type for the +// Query/DelegationRewards RPC method. +message QueryDelegationRewardsResponse { + // rewards defines the rewards accrued by a delegation. 
+ repeated cosmos.base.v1beta1.DecCoin rewards = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins"]; +} + +// QueryDelegationTotalRewardsRequest is the request type for the +// Query/DelegationTotalRewards RPC method. +message QueryDelegationTotalRewardsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + // delegator_address defines the delegator address to query for. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegationTotalRewardsResponse is the response type for the +// Query/DelegationTotalRewards RPC method. +message QueryDelegationTotalRewardsResponse { + // rewards defines all the rewards accrued by a delegator. + repeated DelegationDelegatorReward rewards = 1 [(gogoproto.nullable) = false]; + // total defines the sum of all the rewards. + repeated cosmos.base.v1beta1.DecCoin total = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins"]; +} + +// QueryDelegatorValidatorsRequest is the request type for the +// Query/DelegatorValidators RPC method. +message QueryDelegatorValidatorsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_address defines the delegator address to query for. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegatorValidatorsResponse is the response type for the +// Query/DelegatorValidators RPC method. +message QueryDelegatorValidatorsResponse { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // validators defines the validators a delegator is delegating for. + repeated string validators = 1; +} + +// QueryDelegatorWithdrawAddressRequest is the request type for the +// Query/DelegatorWithdrawAddress RPC method. +message QueryDelegatorWithdrawAddressRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_address defines the delegator address to query for. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegatorWithdrawAddressResponse is the response type for the +// Query/DelegatorWithdrawAddress RPC method. +message QueryDelegatorWithdrawAddressResponse { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // withdraw_address defines the delegator address to query for. + string withdraw_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryCommunityPoolRequest is the request type for the Query/CommunityPool RPC +// method. +message QueryCommunityPoolRequest {} + +// QueryCommunityPoolResponse is the response type for the Query/CommunityPool +// RPC method. +message QueryCommunityPoolResponse { + // pool defines community pool's coins. 
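The delegation-reward responses above carry `DecCoin` lists. A small client-side helper for aggregating them, assuming the generated `DecCoin` is `{ denom, amount }` with the amount as a decimal string:

interface DecCoin { denom: string; amount: string; }

// Sum a list of DecCoins per denom. Plain floating point is fine for display;
// exact accounting would need a decimal library.
function totalsByDenom(coins: DecCoin[]): Record<string, number> {
  const totals: Record<string, number> = {};
  for (const { denom, amount } of coins) {
    totals[denom] = (totals[denom] ?? 0) + Number(amount);
  }
  return totals;
}

console.log(totalsByDenom([
  { denom: "uatom", amount: "12.5" },
  { denom: "uatom", amount: "0.25" },
])); // { uatom: 12.75 }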
+ repeated cosmos.base.v1beta1.DecCoin pool = 1 + [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.DecCoins", (gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/distribution/v1beta1/tx.proto b/packages/codegen/proto/cosmos/distribution/v1beta1/tx.proto new file mode 100644 index 00000000..7f22dce9 --- /dev/null +++ b/packages/codegen/proto/cosmos/distribution/v1beta1/tx.proto @@ -0,0 +1,95 @@ +syntax = "proto3"; +package cosmos.distribution.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/distribution/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +// Msg defines the distribution Msg service. +service Msg { + // SetWithdrawAddress defines a method to change the withdraw address + // for a delegator (or validator self-delegation). + rpc SetWithdrawAddress(MsgSetWithdrawAddress) returns (MsgSetWithdrawAddressResponse); + + // WithdrawDelegatorReward defines a method to withdraw rewards of delegator + // from a single validator. + rpc WithdrawDelegatorReward(MsgWithdrawDelegatorReward) returns (MsgWithdrawDelegatorRewardResponse); + + // WithdrawValidatorCommission defines a method to withdraw the + // full commission to the validator address. + rpc WithdrawValidatorCommission(MsgWithdrawValidatorCommission) returns (MsgWithdrawValidatorCommissionResponse); + + // FundCommunityPool defines a method to allow an account to directly + // fund the community pool. + rpc FundCommunityPool(MsgFundCommunityPool) returns (MsgFundCommunityPoolResponse); +} + +// MsgSetWithdrawAddress sets the withdraw address for +// a delegator (or validator self-delegation). +message MsgSetWithdrawAddress { + option (cosmos.msg.v1.signer) = "delegator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string withdraw_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgSetWithdrawAddressResponse defines the Msg/SetWithdrawAddress response type. +message MsgSetWithdrawAddressResponse {} + +// MsgWithdrawDelegatorReward represents delegation withdrawal to a delegator +// from a single validator. +message MsgWithdrawDelegatorReward { + option (cosmos.msg.v1.signer) = "delegator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgWithdrawDelegatorRewardResponse defines the Msg/WithdrawDelegatorReward response type. +message MsgWithdrawDelegatorRewardResponse { + repeated cosmos.base.v1beta1.Coin amount = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// MsgWithdrawValidatorCommission withdraws the full commission to the validator +// address. +message MsgWithdrawValidatorCommission { + option (cosmos.msg.v1.signer) = "validator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string validator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgWithdrawValidatorCommissionResponse defines the Msg/WithdrawValidatorCommission response type. 
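As a usage sketch for the Msg service above: a signing client such as `@cosmjs/stargate` (an illustrative choice, not a dependency of this package) broadcasts `MsgWithdrawDelegatorReward` by its type URL, which follows the `cosmos.distribution.v1beta1` package and message name; the mnemonic, endpoints, validator address and fee below are placeholders.

import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing";
import { SigningStargateClient } from "@cosmjs/stargate";

async function withdrawRewards(): Promise<void> {
  // Placeholder credentials and endpoints -- replace before running.
  const wallet = await DirectSecp256k1HdWallet.fromMnemonic("<mnemonic>", { prefix: "cosmos" });
  const [account] = await wallet.getAccounts();
  const client = await SigningStargateClient.connectWithSigner("http://localhost:26657", wallet);

  const msg = {
    // "/" + proto package + "." + message name, per the Any/type-URL convention.
    typeUrl: "/cosmos.distribution.v1beta1.MsgWithdrawDelegatorReward",
    value: {
      // Generated TypeScript typically exposes the snake_case proto fields as camelCase.
      delegatorAddress: account.address,
      validatorAddress: "cosmosvaloper1...", // placeholder
    },
  };
  const fee = { amount: [{ denom: "uatom", amount: "5000" }], gas: "200000" };
  const result = await client.signAndBroadcast(account.address, [msg], fee, "withdraw rewards");
  console.log(result.transactionHash);
}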
+message MsgWithdrawValidatorCommissionResponse { + repeated cosmos.base.v1beta1.Coin amount = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// MsgFundCommunityPool allows an account to directly +// fund the community pool. +message MsgFundCommunityPool { + option (cosmos.msg.v1.signer) = "depositor"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + repeated cosmos.base.v1beta1.Coin amount = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgFundCommunityPoolResponse defines the Msg/FundCommunityPool response type. +message MsgFundCommunityPoolResponse {} diff --git a/packages/codegen/proto/cosmos/evidence/v1beta1/evidence.proto b/packages/codegen/proto/cosmos/evidence/v1beta1/evidence.proto new file mode 100644 index 00000000..83f9ec3d --- /dev/null +++ b/packages/codegen/proto/cosmos/evidence/v1beta1/evidence.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package cosmos.evidence.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/evidence/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "cosmos_proto/cosmos.proto"; + +// Equivocation implements the Evidence interface and defines evidence of double +// signing misbehavior. +message Equivocation { + option (gogoproto.goproto_stringer) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.equal) = false; + + int64 height = 1; + google.protobuf.Timestamp time = 2 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + int64 power = 3; + string consensus_address = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/evidence/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/evidence/v1beta1/genesis.proto new file mode 100644 index 00000000..199f446f --- /dev/null +++ b/packages/codegen/proto/cosmos/evidence/v1beta1/genesis.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package cosmos.evidence.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/evidence/types"; + +import "google/protobuf/any.proto"; + +// GenesisState defines the evidence module's genesis state. +message GenesisState { + // evidence defines all the evidence at genesis. + repeated google.protobuf.Any evidence = 1; +} diff --git a/packages/codegen/proto/cosmos/evidence/v1beta1/query.proto b/packages/codegen/proto/cosmos/evidence/v1beta1/query.proto new file mode 100644 index 00000000..eda00544 --- /dev/null +++ b/packages/codegen/proto/cosmos/evidence/v1beta1/query.proto @@ -0,0 +1,51 @@ +syntax = "proto3"; +package cosmos.evidence.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/evidence/types"; + +// Query defines the gRPC querier service. +service Query { + // Evidence queries evidence based on evidence hash. + rpc Evidence(QueryEvidenceRequest) returns (QueryEvidenceResponse) { + option (google.api.http).get = "/cosmos/evidence/v1beta1/evidence/{evidence_hash}"; + } + + // AllEvidence queries all evidence. 
+ rpc AllEvidence(QueryAllEvidenceRequest) returns (QueryAllEvidenceResponse) { + option (google.api.http).get = "/cosmos/evidence/v1beta1/evidence"; + } +} + +// QueryEvidenceRequest is the request type for the Query/Evidence RPC method. +message QueryEvidenceRequest { + // evidence_hash defines the hash of the requested evidence. + bytes evidence_hash = 1 [(gogoproto.casttype) = "github.com/tendermint/tendermint/libs/bytes.HexBytes"]; +} + +// QueryEvidenceResponse is the response type for the Query/Evidence RPC method. +message QueryEvidenceResponse { + // evidence returns the requested evidence. + google.protobuf.Any evidence = 1; +} + +// QueryEvidenceRequest is the request type for the Query/AllEvidence RPC +// method. +message QueryAllEvidenceRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC +// method. +message QueryAllEvidenceResponse { + // evidence returns all evidences. + repeated google.protobuf.Any evidence = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/evidence/v1beta1/tx.proto b/packages/codegen/proto/cosmos/evidence/v1beta1/tx.proto new file mode 100644 index 00000000..90f62964 --- /dev/null +++ b/packages/codegen/proto/cosmos/evidence/v1beta1/tx.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; +package cosmos.evidence.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/evidence/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +// Msg defines the evidence Msg service. +service Msg { + // SubmitEvidence submits an arbitrary Evidence of misbehavior such as equivocation or + // counterfactual signing. + rpc SubmitEvidence(MsgSubmitEvidence) returns (MsgSubmitEvidenceResponse); +} + +// MsgSubmitEvidence represents a message that supports submitting arbitrary +// Evidence of misbehavior such as equivocation or counterfactual signing. +message MsgSubmitEvidence { + option (cosmos.msg.v1.signer) = "submitter"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string submitter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + google.protobuf.Any evidence = 2 [(cosmos_proto.accepts_interface) = "cosmos.evidence.Evidence"]; +} + +// MsgSubmitEvidenceResponse defines the Msg/SubmitEvidence response type. +message MsgSubmitEvidenceResponse { + // hash defines the hash of the evidence. + bytes hash = 4; +} diff --git a/packages/codegen/proto/cosmos/feegrant/v1beta1/feegrant.proto b/packages/codegen/proto/cosmos/feegrant/v1beta1/feegrant.proto new file mode 100644 index 00000000..25fec10b --- /dev/null +++ b/packages/codegen/proto/cosmos/feegrant/v1beta1/feegrant.proto @@ -0,0 +1,78 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.feegrant.v1beta1; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/feegrant"; + +// BasicAllowance implements Allowance with a one-time grant of tokens +// that optionally expires. The grantee can use up to SpendLimit to cover fees. 
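In the evidence messages above, the concrete evidence (e.g. `Equivocation`) travels inside a `google.protobuf.Any`, identified by its fully-qualified type URL. A rough sketch of that convention with local placeholder types; the `encodeEquivocation` stub stands in for the generated protobuf encoder, and the addresses are placeholders:

// Any wraps encoded message bytes plus a type URL of the form
// "/" + proto package + "." + message name.
interface Any {
  typeUrl: string;
  value: Uint8Array;
}

// Local mirror of cosmos.evidence.v1beta1.Equivocation (fields from evidence.proto above).
interface Equivocation {
  height: bigint;
  time: Date;
  power: bigint;
  consensusAddress: string; // camelCase assumed for the generated TypeScript
}

// Stand-in for the generated encoder; real code would emit the actual protobuf bytes.
function encodeEquivocation(_e: Equivocation): Uint8Array {
  return new Uint8Array();
}

function packEvidence(e: Equivocation): Any {
  return { typeUrl: "/cosmos.evidence.v1beta1.Equivocation", value: encodeEquivocation(e) };
}

// MsgSubmitEvidence then carries the packed Any in its `evidence` field.
const msgSubmitEvidence = {
  typeUrl: "/cosmos.evidence.v1beta1.MsgSubmitEvidence",
  value: {
    submitter: "cosmos1...", // placeholder
    evidence: packEvidence({ height: 100n, time: new Date(), power: 10n, consensusAddress: "cosmosvalcons1..." }),
  },
};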
+message BasicAllowance { + option (cosmos_proto.implements_interface) = "cosmos.feegrant.FeeAllowanceI"; + + // spend_limit specifies the maximum amount of tokens that can be spent + // by this allowance and will be updated as tokens are spent. If it is + // empty, there is no spend limit and any amount of coins can be spent. + repeated cosmos.base.v1beta1.Coin spend_limit = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // expiration specifies an optional time when this allowance expires + google.protobuf.Timestamp expiration = 2 [(gogoproto.stdtime) = true]; +} + +// PeriodicAllowance extends Allowance to allow for both a maximum cap, +// as well as a limit per time period. +message PeriodicAllowance { + option (cosmos_proto.implements_interface) = "cosmos.feegrant.FeeAllowanceI"; + + // basic specifies a struct of `BasicAllowance` + BasicAllowance basic = 1 [(gogoproto.nullable) = false]; + + // period specifies the time duration in which period_spend_limit coins can + // be spent before that allowance is reset + google.protobuf.Duration period = 2 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; + + // period_spend_limit specifies the maximum number of coins that can be spent + // in the period + repeated cosmos.base.v1beta1.Coin period_spend_limit = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // period_can_spend is the number of coins left to be spent before the period_reset time + repeated cosmos.base.v1beta1.Coin period_can_spend = 4 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // period_reset is the time at which this period resets and a new one begins, + // it is calculated from the start time of the first transaction after the + // last period ended + google.protobuf.Timestamp period_reset = 5 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; +} + +// AllowedMsgAllowance creates allowance only for specified message types. +message AllowedMsgAllowance { + option (gogoproto.goproto_getters) = false; + option (cosmos_proto.implements_interface) = "cosmos.feegrant.FeeAllowanceI"; + + // allowance can be any of basic and periodic fee allowance. + google.protobuf.Any allowance = 1 [(cosmos_proto.accepts_interface) = "cosmos.feegrant.FeeAllowanceI"]; + + // allowed_messages are the messages for which the grantee has the access. + repeated string allowed_messages = 2; +} + +// Grant is stored in the KVStore to record a grant with full context +message Grant { + // granter is the address of the user granting an allowance of their funds. + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // grantee is the address of the user being granted an allowance of another user's funds. + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // allowance can be any of basic, periodic, allowed fee allowance. 
+ google.protobuf.Any allowance = 3 [(cosmos_proto.accepts_interface) = "cosmos.feegrant.FeeAllowanceI"]; +} diff --git a/packages/codegen/proto/cosmos/feegrant/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/feegrant/v1beta1/genesis.proto new file mode 100644 index 00000000..5b1ac4ca --- /dev/null +++ b/packages/codegen/proto/cosmos/feegrant/v1beta1/genesis.proto @@ -0,0 +1,13 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.feegrant.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/feegrant/v1beta1/feegrant.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/feegrant"; + +// GenesisState contains a set of fee allowances, persisted from the store +message GenesisState { + repeated Grant allowances = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/feegrant/v1beta1/query.proto b/packages/codegen/proto/cosmos/feegrant/v1beta1/query.proto new file mode 100644 index 00000000..59c992c9 --- /dev/null +++ b/packages/codegen/proto/cosmos/feegrant/v1beta1/query.proto @@ -0,0 +1,79 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.feegrant.v1beta1; + +import "cosmos/feegrant/v1beta1/feegrant.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/feegrant"; + +// Query defines the gRPC querier service. +service Query { + + // Allowance returns fee granted to the grantee by the granter. + rpc Allowance(QueryAllowanceRequest) returns (QueryAllowanceResponse) { + option (google.api.http).get = "/cosmos/feegrant/v1beta1/allowance/{granter}/{grantee}"; + } + + // Allowances returns all the grants for address. + rpc Allowances(QueryAllowancesRequest) returns (QueryAllowancesResponse) { + option (google.api.http).get = "/cosmos/feegrant/v1beta1/allowances/{grantee}"; + } + + // AllowancesByGranter returns all the grants given by an address + // Since v0.46 + rpc AllowancesByGranter(QueryAllowancesByGranterRequest) returns (QueryAllowancesByGranterResponse) { + option (google.api.http).get = "/cosmos/feegrant/v1beta1/issued/{granter}"; + } +} + +// QueryAllowanceRequest is the request type for the Query/Allowance RPC method. +message QueryAllowanceRequest { + // granter is the address of the user granting an allowance of their funds. + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // grantee is the address of the user being granted an allowance of another user's funds. + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryAllowanceResponse is the response type for the Query/Allowance RPC method. +message QueryAllowanceResponse { + // allowance is a allowance granted for grantee by granter. + cosmos.feegrant.v1beta1.Grant allowance = 1; +} + +// QueryAllowancesRequest is the request type for the Query/Allowances RPC method. +message QueryAllowancesRequest { + string grantee = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryAllowancesResponse is the response type for the Query/Allowances RPC method. +message QueryAllowancesResponse { + // allowances are allowance's granted for grantee by granter. + repeated cosmos.feegrant.v1beta1.Grant allowances = 1; + + // pagination defines an pagination for the response. 
+ cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryAllowancesByGranterRequest is the request type for the Query/AllowancesByGranter RPC method. +message QueryAllowancesByGranterRequest { + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryAllowancesByGranterResponse is the response type for the Query/AllowancesByGranter RPC method. +message QueryAllowancesByGranterResponse { + // allowances that have been issued by the granter. + repeated cosmos.feegrant.v1beta1.Grant allowances = 1; + + // pagination defines an pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/feegrant/v1beta1/tx.proto b/packages/codegen/proto/cosmos/feegrant/v1beta1/tx.proto new file mode 100644 index 00000000..5cef0557 --- /dev/null +++ b/packages/codegen/proto/cosmos/feegrant/v1beta1/tx.proto @@ -0,0 +1,53 @@ +// Since: cosmos-sdk 0.43 +syntax = "proto3"; +package cosmos.feegrant.v1beta1; + +import "google/protobuf/any.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/feegrant"; + +// Msg defines the feegrant msg service. +service Msg { + + // GrantAllowance grants fee allowance to the grantee on the granter's + // account with the provided expiration time. + rpc GrantAllowance(MsgGrantAllowance) returns (MsgGrantAllowanceResponse); + + // RevokeAllowance revokes any fee allowance of granter's account that + // has been granted to the grantee. + rpc RevokeAllowance(MsgRevokeAllowance) returns (MsgRevokeAllowanceResponse); +} + +// MsgGrantAllowance adds permission for Grantee to spend up to Allowance +// of fees from the account of Granter. +message MsgGrantAllowance { + option (cosmos.msg.v1.signer) = "granter"; + + // granter is the address of the user granting an allowance of their funds. + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // grantee is the address of the user being granted an allowance of another user's funds. + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // allowance can be any of basic, periodic, allowed fee allowance. + google.protobuf.Any allowance = 3 [(cosmos_proto.accepts_interface) = "cosmos.feegrant.FeeAllowanceI"]; +} + +// MsgGrantAllowanceResponse defines the Msg/GrantAllowanceResponse response type. +message MsgGrantAllowanceResponse {} + +// MsgRevokeAllowance removes any existing Allowance from Granter to Grantee. +message MsgRevokeAllowance { + option (cosmos.msg.v1.signer) = "granter"; + + // granter is the address of the user granting an allowance of their funds. + string granter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // grantee is the address of the user being granted an allowance of another user's funds. + string grantee = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgRevokeAllowanceResponse defines the Msg/RevokeAllowanceResponse response type. 
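For orientation, the allowance types above translate into simple structures on the client side; this sketch assumes camelCase field names in the generated TypeScript and a `Coin` of `{ denom, amount }`:

interface Coin { denom: string; amount: string; }

// Mirrors cosmos.feegrant.v1beta1.BasicAllowance (fields from feegrant.proto above).
interface BasicAllowance {
  spendLimit: Coin[]; // empty means no spend limit
  expiration?: Date;  // optional expiry
}

// A one-time allowance of 1 ATOM (1000000 uatom) with a fixed expiry.
const oneTimeAllowance: BasicAllowance = {
  spendLimit: [{ denom: "uatom", amount: "1000000" }],
  expiration: new Date("2030-01-01T00:00:00Z"), // placeholder date
};

// In MsgGrantAllowance the allowance travels as a google.protobuf.Any
// ("/cosmos.feegrant.v1beta1.BasicAllowance" plus the encoded bytes),
// alongside bech32 granter and grantee address strings.

Grants created this way can then be read back over the query routes above, e.g. `/cosmos/feegrant/v1beta1/allowance/{granter}/{grantee}` or `/cosmos/feegrant/v1beta1/allowances/{grantee}`.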
+message MsgRevokeAllowanceResponse {} diff --git a/packages/codegen/proto/cosmos/genutil/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/genutil/v1beta1/genesis.proto new file mode 100644 index 00000000..958d15fe --- /dev/null +++ b/packages/codegen/proto/cosmos/genutil/v1beta1/genesis.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package cosmos.genutil.v1beta1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/genutil/types"; + +// GenesisState defines the raw genesis transaction in JSON. +message GenesisState { + // gen_txs defines the genesis transactions. + repeated bytes gen_txs = 1 [(gogoproto.casttype) = "encoding/json.RawMessage", (gogoproto.jsontag) = "gentxs"]; +} diff --git a/packages/codegen/proto/cosmos/gov/v1/genesis.proto b/packages/codegen/proto/cosmos/gov/v1/genesis.proto new file mode 100644 index 00000000..cb44a7f3 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1/genesis.proto @@ -0,0 +1,26 @@ +// Since: cosmos-sdk 0.46 +syntax = "proto3"; + +package cosmos.gov.v1; + +import "cosmos/gov/v1/gov.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1"; + +// GenesisState defines the gov module's genesis state. +message GenesisState { + // starting_proposal_id is the ID of the starting proposal. + uint64 starting_proposal_id = 1; + // deposits defines all the deposits present at genesis. + repeated Deposit deposits = 2; + // votes defines all the votes present at genesis. + repeated Vote votes = 3; + // proposals defines all the proposals present at genesis. + repeated Proposal proposals = 4; + // params defines all the paramaters of related to deposit. + DepositParams deposit_params = 5; + // params defines all the paramaters of related to voting. + VotingParams voting_params = 6; + // params defines all the paramaters of related to tally. + TallyParams tally_params = 7; +} diff --git a/packages/codegen/proto/cosmos/gov/v1/gov.proto b/packages/codegen/proto/cosmos/gov/v1/gov.proto new file mode 100644 index 00000000..fb014d65 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1/gov.proto @@ -0,0 +1,132 @@ +// Since: cosmos-sdk 0.46 +syntax = "proto3"; +package cosmos.gov.v1; + +import "cosmos/base/v1beta1/coin.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1"; + +// VoteOption enumerates the valid vote options for a given governance proposal. +enum VoteOption { + // VOTE_OPTION_UNSPECIFIED defines a no-op vote option. + VOTE_OPTION_UNSPECIFIED = 0; + // VOTE_OPTION_YES defines a yes vote option. + VOTE_OPTION_YES = 1; + // VOTE_OPTION_ABSTAIN defines an abstain vote option. + VOTE_OPTION_ABSTAIN = 2; + // VOTE_OPTION_NO defines a no vote option. + VOTE_OPTION_NO = 3; + // VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. + VOTE_OPTION_NO_WITH_VETO = 4; +} + +// WeightedVoteOption defines a unit of vote for vote split. +message WeightedVoteOption { + VoteOption option = 1; + string weight = 2 [(cosmos_proto.scalar) = "cosmos.Dec"]; +} + +// Deposit defines an amount deposited by an account address to an active +// proposal. 
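The vote enum and weighted-vote shape above translate directly; a sketch of a split vote, assuming the generated TypeScript keeps the numeric enum values and camelCase fields (weights are `cosmos.Dec` decimal strings):

// Numeric values as declared in the cosmos.gov.v1 VoteOption enum above.
enum VoteOption {
  VOTE_OPTION_UNSPECIFIED = 0,
  VOTE_OPTION_YES = 1,
  VOTE_OPTION_ABSTAIN = 2,
  VOTE_OPTION_NO = 3,
  VOTE_OPTION_NO_WITH_VETO = 4,
}

interface WeightedVoteOption {
  option: VoteOption;
  weight: string; // cosmos.Dec rendered as a decimal string
}

// A vote split 70% yes / 30% abstain (18-decimal Dec strings are the usual serialization).
const splitVote: WeightedVoteOption[] = [
  { option: VoteOption.VOTE_OPTION_YES, weight: "0.700000000000000000" },
  { option: VoteOption.VOTE_OPTION_ABSTAIN, weight: "0.300000000000000000" },
];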
+message Deposit { + uint64 proposal_id = 1; + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 [(gogoproto.nullable) = false]; +} + +// Proposal defines the core field members of a governance proposal. +message Proposal { + uint64 id = 1; + repeated google.protobuf.Any messages = 2; + ProposalStatus status = 3; + // final_tally_result is the final tally result of the proposal. When + // querying a proposal via gRPC, this field is not populated until the + // proposal's voting period has ended. + TallyResult final_tally_result = 4; + google.protobuf.Timestamp submit_time = 5 [(gogoproto.stdtime) = true]; + google.protobuf.Timestamp deposit_end_time = 6 [(gogoproto.stdtime) = true]; + repeated cosmos.base.v1beta1.Coin total_deposit = 7 [(gogoproto.nullable) = false]; + google.protobuf.Timestamp voting_start_time = 8 [(gogoproto.stdtime) = true]; + google.protobuf.Timestamp voting_end_time = 9 [(gogoproto.stdtime) = true]; + + // metadata is any arbitrary metadata attached to the proposal. + string metadata = 10; +} + +// ProposalStatus enumerates the valid statuses of a proposal. +enum ProposalStatus { + // PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. + PROPOSAL_STATUS_UNSPECIFIED = 0; + // PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + // period. + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1; + // PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + // period. + PROPOSAL_STATUS_VOTING_PERIOD = 2; + // PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + // passed. + PROPOSAL_STATUS_PASSED = 3; + // PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + // been rejected. + PROPOSAL_STATUS_REJECTED = 4; + // PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + // failed. + PROPOSAL_STATUS_FAILED = 5; +} + +// TallyResult defines a standard tally for a governance proposal. +message TallyResult { + string yes_count = 1 [(cosmos_proto.scalar) = "cosmos.Int"]; + string abstain_count = 2 [(cosmos_proto.scalar) = "cosmos.Int"]; + string no_count = 3 [(cosmos_proto.scalar) = "cosmos.Int"]; + string no_with_veto_count = 4 [(cosmos_proto.scalar) = "cosmos.Int"]; +} + +// Vote defines a vote on a governance proposal. +// A Vote consists of a proposal ID, the voter, and the vote option. +message Vote { + uint64 proposal_id = 1; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + reserved 3; + repeated WeightedVoteOption options = 4; + + // metadata is any arbitrary metadata to attached to the vote. + string metadata = 5; +} + +// DepositParams defines the params for deposits on governance proposals. +message DepositParams { + // Minimum deposit for a proposal to enter voting period. + repeated cosmos.base.v1beta1.Coin min_deposit = 1 + [(gogoproto.nullable) = false, (gogoproto.jsontag) = "min_deposit,omitempty"]; + + // Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + // months. + google.protobuf.Duration max_deposit_period = 2 + [(gogoproto.stdduration) = true, (gogoproto.jsontag) = "max_deposit_period,omitempty"]; +} + +// VotingParams defines the params for voting on governance proposals. +message VotingParams { + // Length of the voting period. + google.protobuf.Duration voting_period = 1 [(gogoproto.stdduration) = true]; +} + +// TallyParams defines the params for tallying votes on governance proposals. 
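The params above use protobuf `Duration` and the proposal fields use `Timestamp`; a small helper is enough to work with them client-side. This sketch assumes the generated objects expose `{ seconds, nanos }` pairs (a common ts-proto shape, not guaranteed by this change):

interface Duration { seconds: bigint; nanos: number; }

// Convert a protobuf Duration into milliseconds.
function durationToMs(d: Duration): number {
  return Number(d.seconds) * 1000 + Math.floor(d.nanos / 1e6);
}

// Given a proposal's voting start time and VotingParams.voting_period,
// work out when voting closes.
function votingEndTime(votingStart: Date, votingPeriod: Duration): Date {
  return new Date(votingStart.getTime() + durationToMs(votingPeriod));
}

// e.g. a two-day voting period:
const twoDays: Duration = { seconds: 172800n, nanos: 0 };
console.log(votingEndTime(new Date(), twoDays).toISOString());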
+message TallyParams { + // Minimum percentage of total stake needed to vote for a result to be + // considered valid. + string quorum = 1 [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.jsontag) = "quorum,omitempty"]; + + // Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. + string threshold = 2 [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.jsontag) = "threshold,omitempty"]; + + // Minimum value of Veto votes to Total votes ratio for proposal to be + // vetoed. Default value: 1/3. + string veto_threshold = 3 [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.jsontag) = "veto_threshold,omitempty"]; +} diff --git a/packages/codegen/proto/cosmos/gov/v1/query.proto b/packages/codegen/proto/cosmos/gov/v1/query.proto new file mode 100644 index 00000000..ea46472a --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1/query.proto @@ -0,0 +1,183 @@ + +// Since: cosmos-sdk 0.46 +syntax = "proto3"; +package cosmos.gov.v1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +import "cosmos/gov/v1/gov.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1"; + +// Query defines the gRPC querier service for gov module +service Query { + // Proposal queries proposal details based on ProposalID. + rpc Proposal(QueryProposalRequest) returns (QueryProposalResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}"; + } + + // Proposals queries all proposals based on given status. + rpc Proposals(QueryProposalsRequest) returns (QueryProposalsResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals"; + } + + // Vote queries voted information based on proposalID, voterAddr. + rpc Vote(QueryVoteRequest) returns (QueryVoteResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}/votes/{voter}"; + } + + // Votes queries votes of a given proposal. + rpc Votes(QueryVotesRequest) returns (QueryVotesResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}/votes"; + } + + // Params queries all parameters of the gov module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/gov/v1/params/{params_type}"; + } + + // Deposit queries single deposit information based proposalID, depositAddr. + rpc Deposit(QueryDepositRequest) returns (QueryDepositResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}/deposits/{depositor}"; + } + + // Deposits queries all deposits of a single proposal. + rpc Deposits(QueryDepositsRequest) returns (QueryDepositsResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}/deposits"; + } + + // TallyResult queries the tally of a proposal vote. + rpc TallyResult(QueryTallyResultRequest) returns (QueryTallyResultResponse) { + option (google.api.http).get = "/cosmos/gov/v1/proposals/{proposal_id}/tally"; + } +} + +// QueryProposalRequest is the request type for the Query/Proposal RPC method. +message QueryProposalRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; +} + +// QueryProposalResponse is the response type for the Query/Proposal RPC method. +message QueryProposalResponse { + Proposal proposal = 1; +} + +// QueryProposalsRequest is the request type for the Query/Proposals RPC method. +message QueryProposalsRequest { + // proposal_status defines the status of the proposals. 
+ ProposalStatus proposal_status = 1; + + // voter defines the voter address for the proposals. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // depositor defines the deposit addresses from the proposals. + string depositor = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 4; +} + +// QueryProposalsResponse is the response type for the Query/Proposals RPC +// method. +message QueryProposalsResponse { + repeated Proposal proposals = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryVoteRequest is the request type for the Query/Vote RPC method. +message QueryVoteRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // voter defines the oter address for the proposals. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryVoteResponse is the response type for the Query/Vote RPC method. +message QueryVoteResponse { + // vote defined the queried vote. + Vote vote = 1; +} + +// QueryVotesRequest is the request type for the Query/Votes RPC method. +message QueryVotesRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryVotesResponse is the response type for the Query/Votes RPC method. +message QueryVotesResponse { + // votes defined the queried votes. + repeated Vote votes = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest { + // params_type defines which parameters to query for, can be one of "voting", + // "tallying" or "deposit". + string params_type = 1; +} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // voting_params defines the parameters related to voting. + VotingParams voting_params = 1; + // deposit_params defines the parameters related to deposit. + DepositParams deposit_params = 2; + // tally_params defines the parameters related to tally. + TallyParams tally_params = 3; +} + +// QueryDepositRequest is the request type for the Query/Deposit RPC method. +message QueryDepositRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // depositor defines the deposit addresses from the proposals. + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDepositResponse is the response type for the Query/Deposit RPC method. +message QueryDepositResponse { + // deposit defines the requested deposit. + Deposit deposit = 1; +} + +// QueryDepositsRequest is the request type for the Query/Deposits RPC method. +message QueryDepositsRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryDepositsResponse is the response type for the Query/Deposits RPC method. +message QueryDepositsResponse { + repeated Deposit deposits = 1; + + // pagination defines the pagination in the response. 
+ cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryTallyResultRequest is the request type for the Query/Tally RPC method. +message QueryTallyResultRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; +} + +// QueryTallyResultResponse is the response type for the Query/Tally RPC method. +message QueryTallyResultResponse { + // tally defines the requested tally. + TallyResult tally = 1; +} diff --git a/packages/codegen/proto/cosmos/gov/v1/tx.proto b/packages/codegen/proto/cosmos/gov/v1/tx.proto new file mode 100644 index 00000000..7aee9991 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1/tx.proto @@ -0,0 +1,100 @@ +// Since: cosmos-sdk 0.46 +syntax = "proto3"; +package cosmos.gov.v1; + +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/gov/v1/gov.proto"; +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "google/protobuf/any.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1"; + +// Msg defines the gov Msg service. +service Msg { + // SubmitProposal defines a method to create new proposal given a content. + rpc SubmitProposal(MsgSubmitProposal) returns (MsgSubmitProposalResponse); + + // ExecLegacyContent defines a Msg to be in included in a MsgSubmitProposal + // to execute a legacy content-based proposal. + rpc ExecLegacyContent(MsgExecLegacyContent) returns (MsgExecLegacyContentResponse); + + // Vote defines a method to add a vote on a specific proposal. + rpc Vote(MsgVote) returns (MsgVoteResponse); + + // VoteWeighted defines a method to add a weighted vote on a specific proposal. + rpc VoteWeighted(MsgVoteWeighted) returns (MsgVoteWeightedResponse); + + // Deposit defines a method to add deposit on a specific proposal. + rpc Deposit(MsgDeposit) returns (MsgDepositResponse); +} + +// MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary +// proposal Content. +message MsgSubmitProposal { + option (cosmos.msg.v1.signer) = "proposer"; + + repeated google.protobuf.Any messages = 1; + repeated cosmos.base.v1beta1.Coin initial_deposit = 2 [(gogoproto.nullable) = false]; + string proposer = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // metadata is any arbitrary metadata attached to the proposal. + string metadata = 4; +} + +// MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. +message MsgSubmitProposalResponse { + uint64 proposal_id = 1; +} + +// MsgExecLegacyContent is used to wrap the legacy content field into a message. +// This ensures backwards compatibility with v1beta1.MsgSubmitProposal. +message MsgExecLegacyContent { + option (cosmos.msg.v1.signer) = "authority"; + + // content is the proposal's content. + google.protobuf.Any content = 1 [(cosmos_proto.accepts_interface) = "cosmos.gov.v1beta1.Content"]; + // authority must be the gov module address. + string authority = 2; +} + +// MsgExecLegacyContentResponse defines the Msg/ExecLegacyContent response type. +message MsgExecLegacyContentResponse {} + +// MsgVote defines a message to cast a vote. +message MsgVote { + option (cosmos.msg.v1.signer) = "voter"; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + VoteOption option = 3; + string metadata = 4; +} + +// MsgVoteResponse defines the Msg/Vote response type. +message MsgVoteResponse {} + +// MsgVoteWeighted defines a message to cast a vote. 
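The gov v1 query routes above follow the same REST pattern as the other modules; a sketch of listing proposals currently in their voting period, assuming an LCD endpoint and the grpc-gateway convention that non-path request fields (`proposal_status`, `pagination.*`) become query parameters:

const LCD = "http://localhost:1317"; // assumed endpoint

// ProposalStatus value as declared in cosmos/gov/v1/gov.proto above.
const PROPOSAL_STATUS_VOTING_PERIOD = 2;

async function proposalsInVoting(limit = 20): Promise<unknown> {
  const url = new URL(`${LCD}/cosmos/gov/v1/proposals`);
  url.searchParams.set("proposal_status", String(PROPOSAL_STATUS_VOTING_PERIOD));
  url.searchParams.set("pagination.limit", String(limit));
  const res = await fetch(url);
  if (!res.ok) throw new Error(`LCD error: ${res.status}`);
  return res.json();
}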
+message MsgVoteWeighted { + option (cosmos.msg.v1.signer) = "voter"; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated WeightedVoteOption options = 3; + string metadata = 4; +} + +// MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. +message MsgVoteWeightedResponse {} + +// MsgDeposit defines a message to submit a deposit to an existing proposal. +message MsgDeposit { + option (cosmos.msg.v1.signer) = "depositor"; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 [(gogoproto.nullable) = false]; +} + +// MsgDepositResponse defines the Msg/Deposit response type. +message MsgDepositResponse {} diff --git a/packages/codegen/proto/cosmos/gov/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/gov/v1beta1/genesis.proto new file mode 100644 index 00000000..be9b07e4 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1beta1/genesis.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package cosmos.gov.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/gov/v1beta1/gov.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"; + +// GenesisState defines the gov module's genesis state. +message GenesisState { + // starting_proposal_id is the ID of the starting proposal. + uint64 starting_proposal_id = 1; + // deposits defines all the deposits present at genesis. + repeated Deposit deposits = 2 [(gogoproto.castrepeated) = "Deposits", (gogoproto.nullable) = false]; + // votes defines all the votes present at genesis. + repeated Vote votes = 3 [(gogoproto.castrepeated) = "Votes", (gogoproto.nullable) = false]; + // proposals defines all the proposals present at genesis. + repeated Proposal proposals = 4 [(gogoproto.castrepeated) = "Proposals", (gogoproto.nullable) = false]; + // params defines all the paramaters of related to deposit. + DepositParams deposit_params = 5 [(gogoproto.nullable) = false]; + // params defines all the paramaters of related to voting. + VotingParams voting_params = 6 [(gogoproto.nullable) = false]; + // params defines all the paramaters of related to tally. + TallyParams tally_params = 7 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/gov/v1beta1/gov.proto b/packages/codegen/proto/cosmos/gov/v1beta1/gov.proto new file mode 100644 index 00000000..c23dd925 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1beta1/gov.proto @@ -0,0 +1,201 @@ +syntax = "proto3"; +package cosmos.gov.v1beta1; + +import "cosmos/base/v1beta1/coin.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"; + +option (gogoproto.goproto_stringer_all) = false; +option (gogoproto.stringer_all) = false; +option (gogoproto.goproto_getters_all) = false; + +// VoteOption enumerates the valid vote options for a given governance proposal. +enum VoteOption { + option (gogoproto.goproto_enum_prefix) = false; + + // VOTE_OPTION_UNSPECIFIED defines a no-op vote option. + VOTE_OPTION_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "OptionEmpty"]; + // VOTE_OPTION_YES defines a yes vote option. 
+ VOTE_OPTION_YES = 1 [(gogoproto.enumvalue_customname) = "OptionYes"]; + // VOTE_OPTION_ABSTAIN defines an abstain vote option. + VOTE_OPTION_ABSTAIN = 2 [(gogoproto.enumvalue_customname) = "OptionAbstain"]; + // VOTE_OPTION_NO defines a no vote option. + VOTE_OPTION_NO = 3 [(gogoproto.enumvalue_customname) = "OptionNo"]; + // VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. + VOTE_OPTION_NO_WITH_VETO = 4 [(gogoproto.enumvalue_customname) = "OptionNoWithVeto"]; +} + +// WeightedVoteOption defines a unit of vote for vote split. +// +// Since: cosmos-sdk 0.43 +message WeightedVoteOption { + VoteOption option = 1; + string weight = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// TextProposal defines a standard text proposal whose changes need to be +// manually updated in case of approval. +message TextProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + option (gogoproto.equal) = true; + + string title = 1; + string description = 2; +} + +// Deposit defines an amount deposited by an account address to an active +// proposal. +message Deposit { + option (gogoproto.goproto_getters) = false; + option (gogoproto.equal) = false; + + uint64 proposal_id = 1; + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// Proposal defines the core field members of a governance proposal. +message Proposal { + option (gogoproto.equal) = true; + + uint64 proposal_id = 1; + google.protobuf.Any content = 2 [(cosmos_proto.accepts_interface) = "cosmos.gov.v1beta1.Content"]; + ProposalStatus status = 3; + // final_tally_result is the final tally result of the proposal. When + // querying a proposal via gRPC, this field is not populated until the + // proposal's voting period has ended. + TallyResult final_tally_result = 4 [(gogoproto.nullable) = false]; + google.protobuf.Timestamp submit_time = 5 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + google.protobuf.Timestamp deposit_end_time = 6 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + repeated cosmos.base.v1beta1.Coin total_deposit = 7 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + google.protobuf.Timestamp voting_start_time = 8 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + google.protobuf.Timestamp voting_end_time = 9 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; +} + +// ProposalStatus enumerates the valid statuses of a proposal. +enum ProposalStatus { + option (gogoproto.goproto_enum_prefix) = false; + + // PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. + PROPOSAL_STATUS_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "StatusNil"]; + // PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + // period. + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1 [(gogoproto.enumvalue_customname) = "StatusDepositPeriod"]; + // PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + // period. + PROPOSAL_STATUS_VOTING_PERIOD = 2 [(gogoproto.enumvalue_customname) = "StatusVotingPeriod"]; + // PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + // passed. 
+ PROPOSAL_STATUS_PASSED = 3 [(gogoproto.enumvalue_customname) = "StatusPassed"]; + // PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + // been rejected. + PROPOSAL_STATUS_REJECTED = 4 [(gogoproto.enumvalue_customname) = "StatusRejected"]; + // PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + // failed. + PROPOSAL_STATUS_FAILED = 5 [(gogoproto.enumvalue_customname) = "StatusFailed"]; +} + +// TallyResult defines a standard tally for a governance proposal. +message TallyResult { + option (gogoproto.equal) = true; + + string yes = 1 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + string abstain = 2 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + string no = 3 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + string no_with_veto = 4 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; +} + +// Vote defines a vote on a governance proposal. +// A Vote consists of a proposal ID, the voter, and the vote option. +message Vote { + option (gogoproto.goproto_stringer) = false; + option (gogoproto.equal) = false; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "id"]; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Deprecated: Prefer to use `options` instead. This field is set in queries + // if and only if `len(options) == 1` and that option has weight 1. In all + // other cases, this field will default to VOTE_OPTION_UNSPECIFIED. + VoteOption option = 3 [deprecated = true]; + // Since: cosmos-sdk 0.43 + repeated WeightedVoteOption options = 4 [(gogoproto.nullable) = false]; +} + +// DepositParams defines the params for deposits on governance proposals. +message DepositParams { + // Minimum deposit for a proposal to enter voting period. + repeated cosmos.base.v1beta1.Coin min_deposit = 1 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (gogoproto.jsontag) = "min_deposit,omitempty" + ]; + + // Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + // months. + google.protobuf.Duration max_deposit_period = 2 [ + (gogoproto.nullable) = false, + (gogoproto.stdduration) = true, + (gogoproto.jsontag) = "max_deposit_period,omitempty" + ]; +} + +// VotingParams defines the params for voting on governance proposals. +message VotingParams { + // Length of the voting period. + google.protobuf.Duration voting_period = 1 + [(gogoproto.nullable) = false, (gogoproto.stdduration) = true, (gogoproto.jsontag) = "voting_period,omitempty"]; +} + +// TallyParams defines the params for tallying votes on governance proposals. +message TallyParams { + // Minimum percentage of total stake needed to vote for a result to be + // considered valid. + bytes quorum = 1 [ + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "quorum,omitempty" + ]; + + // Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. 
+ bytes threshold = 2 [ + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "threshold,omitempty" + ]; + + // Minimum value of Veto votes to Total votes ratio for proposal to be + // vetoed. Default value: 1/3. + bytes veto_threshold = 3 [ + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "veto_threshold,omitempty" + ]; +} diff --git a/packages/codegen/proto/cosmos/gov/v1beta1/query.proto b/packages/codegen/proto/cosmos/gov/v1beta1/query.proto new file mode 100644 index 00000000..e8837fd2 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1beta1/query.proto @@ -0,0 +1,191 @@ +syntax = "proto3"; +package cosmos.gov.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/gov/v1beta1/gov.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"; + +// Query defines the gRPC querier service for gov module +service Query { + // Proposal queries proposal details based on ProposalID. + rpc Proposal(QueryProposalRequest) returns (QueryProposalResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}"; + } + + // Proposals queries all proposals based on given status. + rpc Proposals(QueryProposalsRequest) returns (QueryProposalsResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals"; + } + + // Vote queries voted information based on proposalID, voterAddr. + rpc Vote(QueryVoteRequest) returns (QueryVoteResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}/votes/{voter}"; + } + + // Votes queries votes of a given proposal. + rpc Votes(QueryVotesRequest) returns (QueryVotesResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}/votes"; + } + + // Params queries all parameters of the gov module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/params/{params_type}"; + } + + // Deposit queries single deposit information based proposalID, depositAddr. + rpc Deposit(QueryDepositRequest) returns (QueryDepositResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}/deposits/{depositor}"; + } + + // Deposits queries all deposits of a single proposal. + rpc Deposits(QueryDepositsRequest) returns (QueryDepositsResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}/deposits"; + } + + // TallyResult queries the tally of a proposal vote. + rpc TallyResult(QueryTallyResultRequest) returns (QueryTallyResultResponse) { + option (google.api.http).get = "/cosmos/gov/v1beta1/proposals/{proposal_id}/tally"; + } +} + +// QueryProposalRequest is the request type for the Query/Proposal RPC method. +message QueryProposalRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; +} + +// QueryProposalResponse is the response type for the Query/Proposal RPC method. +message QueryProposalResponse { + Proposal proposal = 1 [(gogoproto.nullable) = false]; +} + +// QueryProposalsRequest is the request type for the Query/Proposals RPC method. +message QueryProposalsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // proposal_status defines the status of the proposals. 
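One practical difference between the two gov versions vendored here: the tally fields are named `yes_count`/`abstain_count`/`no_count`/`no_with_veto_count` in `cosmos.gov.v1` but `yes`/`abstain`/`no`/`no_with_veto` in `cosmos.gov.v1beta1`, so client code needs distinct types. A sketch, with camelCase assumed for the generated output and all values integer strings:

// cosmos.gov.v1.TallyResult (gov/v1/gov.proto earlier in this change).
interface TallyResultV1 {
  yesCount: string;
  abstainCount: string;
  noCount: string;
  noWithVetoCount: string;
}

// cosmos.gov.v1beta1.TallyResult (this file).
interface TallyResultV1Beta1 {
  yes: string;
  abstain: string;
  no: string;
  noWithVeto: string;
}

// Normalize the legacy shape onto the v1 field names before displaying.
function toV1(t: TallyResultV1Beta1): TallyResultV1 {
  return { yesCount: t.yes, abstainCount: t.abstain, noCount: t.no, noWithVetoCount: t.noWithVeto };
}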
+ ProposalStatus proposal_status = 1; + + // voter defines the voter address for the proposals. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // depositor defines the deposit addresses from the proposals. + string depositor = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 4; +} + +// QueryProposalsResponse is the response type for the Query/Proposals RPC +// method. +message QueryProposalsResponse { + repeated Proposal proposals = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryVoteRequest is the request type for the Query/Vote RPC method. +message QueryVoteRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // voter defines the oter address for the proposals. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryVoteResponse is the response type for the Query/Vote RPC method. +message QueryVoteResponse { + // vote defined the queried vote. + Vote vote = 1 [(gogoproto.nullable) = false]; +} + +// QueryVotesRequest is the request type for the Query/Votes RPC method. +message QueryVotesRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryVotesResponse is the response type for the Query/Votes RPC method. +message QueryVotesResponse { + // votes defined the queried votes. + repeated Vote votes = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest { + // params_type defines which parameters to query for, can be one of "voting", + // "tallying" or "deposit". + string params_type = 1; +} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // voting_params defines the parameters related to voting. + VotingParams voting_params = 1 [(gogoproto.nullable) = false]; + // deposit_params defines the parameters related to deposit. + DepositParams deposit_params = 2 [(gogoproto.nullable) = false]; + // tally_params defines the parameters related to tally. + TallyParams tally_params = 3 [(gogoproto.nullable) = false]; +} + +// QueryDepositRequest is the request type for the Query/Deposit RPC method. +message QueryDepositRequest { + option (gogoproto.goproto_getters) = false; + option (gogoproto.equal) = false; + + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; + + // depositor defines the deposit addresses from the proposals. + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDepositResponse is the response type for the Query/Deposit RPC method. +message QueryDepositResponse { + // deposit defines the requested deposit. + Deposit deposit = 1 [(gogoproto.nullable) = false]; +} + +// QueryDepositsRequest is the request type for the Query/Deposits RPC method. +message QueryDepositsRequest { + // proposal_id defines the unique id of the proposal. 
+ uint64 proposal_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryDepositsResponse is the response type for the Query/Deposits RPC method. +message QueryDepositsResponse { + repeated Deposit deposits = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryTallyResultRequest is the request type for the Query/Tally RPC method. +message QueryTallyResultRequest { + // proposal_id defines the unique id of the proposal. + uint64 proposal_id = 1; +} + +// QueryTallyResultResponse is the response type for the Query/Tally RPC method. +message QueryTallyResultResponse { + // tally defines the requested tally. + TallyResult tally = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/gov/v1beta1/tx.proto b/packages/codegen/proto/cosmos/gov/v1beta1/tx.proto new file mode 100644 index 00000000..6b2f1689 --- /dev/null +++ b/packages/codegen/proto/cosmos/gov/v1beta1/tx.proto @@ -0,0 +1,106 @@ +syntax = "proto3"; +package cosmos.gov.v1beta1; + +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/gov/v1beta1/gov.proto"; +import "cosmos_proto/cosmos.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"; + +// Msg defines the bank Msg service. +service Msg { + // SubmitProposal defines a method to create new proposal given a content. + rpc SubmitProposal(MsgSubmitProposal) returns (MsgSubmitProposalResponse); + + // Vote defines a method to add a vote on a specific proposal. + rpc Vote(MsgVote) returns (MsgVoteResponse); + + // VoteWeighted defines a method to add a weighted vote on a specific proposal. + // + // Since: cosmos-sdk 0.43 + rpc VoteWeighted(MsgVoteWeighted) returns (MsgVoteWeightedResponse); + + // Deposit defines a method to add deposit on a specific proposal. + rpc Deposit(MsgDeposit) returns (MsgDepositResponse); +} + +// MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary +// proposal Content. +message MsgSubmitProposal { + option (cosmos.msg.v1.signer) = "proposer"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.stringer) = false; + option (gogoproto.goproto_getters) = false; + + google.protobuf.Any content = 1 [(cosmos_proto.accepts_interface) = "cosmos.gov.v1beta1.Content"]; + repeated cosmos.base.v1beta1.Coin initial_deposit = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + string proposer = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. +message MsgSubmitProposalResponse { + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; +} + +// MsgVote defines a message to cast a vote. +message MsgVote { + option (cosmos.msg.v1.signer) = "voter"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.stringer) = false; + option (gogoproto.goproto_getters) = false; + + uint64 proposal_id = 1; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + VoteOption option = 3; +} + +// MsgVoteResponse defines the Msg/Vote response type. +message MsgVoteResponse {} + +// MsgVoteWeighted defines a message to cast a vote. 
+// +// Since: cosmos-sdk 0.43 +message MsgVoteWeighted { + option (cosmos.msg.v1.signer) = "voter"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.stringer) = false; + option (gogoproto.goproto_getters) = false; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated WeightedVoteOption options = 3 [(gogoproto.nullable) = false]; +} + +// MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. +// +// Since: cosmos-sdk 0.43 +message MsgVoteWeightedResponse {} + +// MsgDeposit defines a message to submit a deposit to an existing proposal. +message MsgDeposit { + option (cosmos.msg.v1.signer) = "depositor"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.stringer) = false; + option (gogoproto.goproto_getters) = false; + + uint64 proposal_id = 1 [(gogoproto.jsontag) = "proposal_id"]; + string depositor = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// MsgDepositResponse defines the Msg/Deposit response type. +message MsgDepositResponse {} diff --git a/packages/codegen/proto/cosmos/group/v1/events.proto b/packages/codegen/proto/cosmos/group/v1/events.proto new file mode 100644 index 00000000..e8907243 --- /dev/null +++ b/packages/codegen/proto/cosmos/group/v1/events.proto @@ -0,0 +1,77 @@ +syntax = "proto3"; + +package cosmos.group.v1; + +import "cosmos_proto/cosmos.proto"; +import "cosmos/group/v1/types.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/group"; + +// EventCreateGroup is an event emitted when a group is created. +message EventCreateGroup { + + // group_id is the unique ID of the group. + uint64 group_id = 1; +} + +// EventUpdateGroup is an event emitted when a group is updated. +message EventUpdateGroup { + + // group_id is the unique ID of the group. + uint64 group_id = 1; +} + +// EventCreateGroupPolicy is an event emitted when a group policy is created. +message EventCreateGroupPolicy { + + // address is the account address of the group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// EventUpdateGroupPolicy is an event emitted when a group policy is updated. +message EventUpdateGroupPolicy { + + // address is the account address of the group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// EventSubmitProposal is an event emitted when a proposal is created. +message EventSubmitProposal { + + // proposal_id is the unique ID of the proposal. + uint64 proposal_id = 1; +} + +// EventWithdrawProposal is an event emitted when a proposal is withdrawn. +message EventWithdrawProposal { + + // proposal_id is the unique ID of the proposal. + uint64 proposal_id = 1; +} + +// EventVote is an event emitted when a voter votes on a proposal. +message EventVote { + + // proposal_id is the unique ID of the proposal. + uint64 proposal_id = 1; +} + +// EventExec is an event emitted when a proposal is executed. +message EventExec { + + // proposal_id is the unique ID of the proposal. + uint64 proposal_id = 1; + + // result is the proposal execution result. + ProposalExecutorResult result = 2; +} + +// EventLeaveGroup is an event emitted when group member leaves the group. 
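These group event messages are emitted as typed events. A common SDK convention (an assumption here, not stated in this file) is that the emitted event's type string equals the fully qualified message name, e.g. cosmos.group.v1.EventCreateGroup, with attribute values JSON-encoded. A sketch of reading the new group id from a transaction's event list under that assumption:

// Minimal event shape as it appears in a tx result; this shape is an assumption.
interface TxEvent {
  type: string;
  attributes: { key: string; value: string }[];
}

// Find the group_id of the first EventCreateGroup in a tx's events.
function createdGroupId(events: TxEvent[]): string | undefined {
  const ev = events.find((e) => e.type === "cosmos.group.v1.EventCreateGroup");
  const attr = ev?.attributes.find((a) => a.key === "group_id");
  // Typed-event attribute values are often JSON-encoded, e.g. "\"1\"" for group_id 1.
  return attr ? String(JSON.parse(attr.value)) : undefined;
}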
+message EventLeaveGroup { + + // group_id is the unique ID of the group. + uint64 group_id = 1; + + // address is the account address of the group member. + string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} diff --git a/packages/codegen/proto/cosmos/group/v1/genesis.proto b/packages/codegen/proto/cosmos/group/v1/genesis.proto new file mode 100644 index 00000000..49655ad2 --- /dev/null +++ b/packages/codegen/proto/cosmos/group/v1/genesis.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; + +package cosmos.group.v1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/group"; + +import "cosmos/group/v1/types.proto"; + +// GenesisState defines the group module's genesis state. +message GenesisState { + + // group_seq is the group table orm.Sequence, + // it is used to get the next group ID. + uint64 group_seq = 1; + + // groups is the list of groups info. + repeated GroupInfo groups = 2; + + // group_members is the list of groups members. + repeated GroupMember group_members = 3; + + // group_policy_seq is the group policy table orm.Sequence, + // it is used to generate the next group policy account address. + uint64 group_policy_seq = 4; + + // group_policies is the list of group policies info. + repeated GroupPolicyInfo group_policies = 5; + + // proposal_seq is the proposal table orm.Sequence, + // it is used to get the next proposal ID. + uint64 proposal_seq = 6; + + // proposals is the list of proposals. + repeated Proposal proposals = 7; + + // votes is the list of votes. + repeated Vote votes = 8; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/group/v1/query.proto b/packages/codegen/proto/cosmos/group/v1/query.proto new file mode 100644 index 00000000..1690d5b7 --- /dev/null +++ b/packages/codegen/proto/cosmos/group/v1/query.proto @@ -0,0 +1,308 @@ +syntax = "proto3"; + +package cosmos.group.v1; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/group/v1/types.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/group"; + +// Query is the cosmos.group.v1 Query service. +service Query { + + // GroupInfo queries group info based on group id. + rpc GroupInfo(QueryGroupInfoRequest) returns (QueryGroupInfoResponse) { + option (google.api.http).get = "/cosmos/group/v1/group_info/{group_id}"; + }; + + // GroupPolicyInfo queries group policy info based on account address of group policy. + rpc GroupPolicyInfo(QueryGroupPolicyInfoRequest) returns (QueryGroupPolicyInfoResponse) { + option (google.api.http).get = "/cosmos/group/v1/group_policy_info/{address}"; + }; + + // GroupMembers queries members of a group + rpc GroupMembers(QueryGroupMembersRequest) returns (QueryGroupMembersResponse) { + option (google.api.http).get = "/cosmos/group/v1/group_members/{group_id}"; + }; + + // GroupsByAdmin queries groups by admin address. + rpc GroupsByAdmin(QueryGroupsByAdminRequest) returns (QueryGroupsByAdminResponse) { + option (google.api.http).get = "/cosmos/group/v1/groups_by_admin/{admin}"; + }; + + // GroupPoliciesByGroup queries group policies by group id. + rpc GroupPoliciesByGroup(QueryGroupPoliciesByGroupRequest) returns (QueryGroupPoliciesByGroupResponse) { + option (google.api.http).get = "/cosmos/group/v1/group_policies_by_group/{group_id}"; + }; + + // GroupsByAdmin queries group policies by admin address. 
+ rpc GroupPoliciesByAdmin(QueryGroupPoliciesByAdminRequest) returns (QueryGroupPoliciesByAdminResponse) { + option (google.api.http).get = "/cosmos/group/v1/group_policies_by_admin/{admin}"; + }; + + // Proposal queries a proposal based on proposal id. + rpc Proposal(QueryProposalRequest) returns (QueryProposalResponse) { + option (google.api.http).get = "/cosmos/group/v1/proposal/{proposal_id}"; + }; + + // ProposalsByGroupPolicy queries proposals based on account address of group policy. + rpc ProposalsByGroupPolicy(QueryProposalsByGroupPolicyRequest) returns (QueryProposalsByGroupPolicyResponse) { + option (google.api.http).get = "/cosmos/group/v1/proposals_by_group_policy/{address}"; + }; + + // VoteByProposalVoter queries a vote by proposal id and voter. + rpc VoteByProposalVoter(QueryVoteByProposalVoterRequest) returns (QueryVoteByProposalVoterResponse) { + option (google.api.http).get = "/cosmos/group/v1/vote_by_proposal_voter/{proposal_id}/{voter}"; + }; + + // VotesByProposal queries a vote by proposal. + rpc VotesByProposal(QueryVotesByProposalRequest) returns (QueryVotesByProposalResponse) { + option (google.api.http).get = "/cosmos/group/v1/votes_by_proposal/{proposal_id}"; + }; + + // VotesByVoter queries a vote by voter. + rpc VotesByVoter(QueryVotesByVoterRequest) returns (QueryVotesByVoterResponse) { + option (google.api.http).get = "/cosmos/group/v1/votes_by_voter/{voter}"; + }; + + // GroupsByMember queries groups by member address. + rpc GroupsByMember(QueryGroupsByMemberRequest) returns (QueryGroupsByMemberResponse) { + option (google.api.http).get = "/cosmos/group/v1/groups_by_member/{address}"; + }; + + // TallyResult queries the tally of a proposal votes. + rpc TallyResult(QueryTallyResultRequest) returns (QueryTallyResultResponse) { + option (google.api.http).get = "/cosmos/group/v1/proposals/{proposal_id}/tally"; + }; +} + +// QueryGroupInfoRequest is the Query/GroupInfo request type. +message QueryGroupInfoRequest { + + // group_id is the unique ID of the group. + uint64 group_id = 1; +} + +// QueryGroupInfoResponse is the Query/GroupInfo response type. +message QueryGroupInfoResponse { + + // info is the GroupInfo for the group. + GroupInfo info = 1; +} + +// QueryGroupPolicyInfoRequest is the Query/GroupPolicyInfo request type. +message QueryGroupPolicyInfoRequest { + + // address is the account address of the group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryGroupPolicyInfoResponse is the Query/GroupPolicyInfo response type. +message QueryGroupPolicyInfoResponse { + + // info is the GroupPolicyInfo for the group policy. + GroupPolicyInfo info = 1; +} + +// QueryGroupMembersRequest is the Query/GroupMembers request type. +message QueryGroupMembersRequest { + + // group_id is the unique ID of the group. + uint64 group_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGroupMembersResponse is the Query/GroupMembersResponse response type. +message QueryGroupMembersResponse { + + // members are the members of the group with given group_id. + repeated GroupMember members = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGroupsByAdminRequest is the Query/GroupsByAdmin request type. +message QueryGroupsByAdminRequest { + + // admin is the account address of a group's admin. 
+ string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGroupsByAdminResponse is the Query/GroupsByAdminResponse response type. +message QueryGroupsByAdminResponse { + + // groups are the groups info with the provided admin. + repeated GroupInfo groups = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGroupPoliciesByGroupRequest is the Query/GroupPoliciesByGroup request type. +message QueryGroupPoliciesByGroupRequest { + + // group_id is the unique ID of the group policy's group. + uint64 group_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGroupPoliciesByGroupResponse is the Query/GroupPoliciesByGroup response type. +message QueryGroupPoliciesByGroupResponse { + + // group_policies are the group policies info associated with the provided group. + repeated GroupPolicyInfo group_policies = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGroupPoliciesByAdminRequest is the Query/GroupPoliciesByAdmin request type. +message QueryGroupPoliciesByAdminRequest { + + // admin is the admin address of the group policy. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGroupPoliciesByAdminResponse is the Query/GroupPoliciesByAdmin response type. +message QueryGroupPoliciesByAdminResponse { + + // group_policies are the group policies info with provided admin. + repeated GroupPolicyInfo group_policies = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryProposalRequest is the Query/Proposal request type. +message QueryProposalRequest { + + // proposal_id is the unique ID of a proposal. + uint64 proposal_id = 1; +} + +// QueryProposalResponse is the Query/Proposal response type. +message QueryProposalResponse { + + // proposal is the proposal info. + Proposal proposal = 1; +} + +// QueryProposalsByGroupPolicyRequest is the Query/ProposalByGroupPolicy request type. +message QueryProposalsByGroupPolicyRequest { + + // address is the account address of the group policy related to proposals. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryProposalsByGroupPolicyResponse is the Query/ProposalByGroupPolicy response type. +message QueryProposalsByGroupPolicyResponse { + + // proposals are the proposals with given group policy. + repeated Proposal proposals = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryVoteByProposalVoterRequest is the Query/VoteByProposalVoter request type. +message QueryVoteByProposalVoterRequest { + + // proposal_id is the unique ID of a proposal. + uint64 proposal_id = 1; + + // voter is a proposal voter account address. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryVoteByProposalVoterResponse is the Query/VoteByProposalVoter response type. 
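Most of the list queries above take a cosmos.base.query.v1beta1.PageRequest and return a PageResponse. A sketch of paging through GroupMembers over the /cosmos/group/v1/group_members/{group_id} route annotated earlier, assuming the usual gRPC-gateway mapping of nested fields to pagination.key / pagination.limit query parameters and a pagination.next_key field in the response (those types are defined outside this file, so this is an assumption):

// Walk every member of a group, following next_key until pagination is exhausted.
async function allGroupMembers(apiBase: string, groupId: string): Promise<unknown[]> {
  const members: unknown[] = [];
  let key: string | undefined;
  do {
    const params = new URLSearchParams({ "pagination.limit": "100" });
    if (key) params.set("pagination.key", key);
    const res = await fetch(`${apiBase}/cosmos/group/v1/group_members/${groupId}?${params}`);
    const body: any = await res.json();
    members.push(...(body.members ?? []));
    key = body.pagination?.next_key || undefined; // an empty or absent next_key ends the loop
  } while (key);
  return members;
}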
+message QueryVoteByProposalVoterResponse { + + // vote is the vote with given proposal_id and voter. + Vote vote = 1; +} + +// QueryVotesByProposalRequest is the Query/VotesByProposal request type. +message QueryVotesByProposalRequest { + + // proposal_id is the unique ID of a proposal. + uint64 proposal_id = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryVotesByProposalResponse is the Query/VotesByProposal response type. +message QueryVotesByProposalResponse { + + // votes are the list of votes for given proposal_id. + repeated Vote votes = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryVotesByVoterRequest is the Query/VotesByVoter request type. +message QueryVotesByVoterRequest { + // voter is a proposal voter account address. + string voter = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryVotesByVoterResponse is the Query/VotesByVoter response type. +message QueryVotesByVoterResponse { + + // votes are the list of votes by given voter. + repeated Vote votes = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryGroupsByMemberRequest is the Query/GroupsByMember request type. +message QueryGroupsByMemberRequest { + // address is the group member address. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryGroupsByMemberResponse is the Query/GroupsByMember response type. +message QueryGroupsByMemberResponse { + // groups are the groups info with the provided group member. + repeated GroupInfo groups = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryTallyResultRequest is the Query/TallyResult request type. +message QueryTallyResultRequest { + // proposal_id is the unique id of a proposal. + uint64 proposal_id = 1; +} + +// QueryTallyResultResponse is the Query/TallyResult response type. +message QueryTallyResultResponse { + // tally defines the requested tally. + TallyResult tally = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/group/v1/tx.proto b/packages/codegen/proto/cosmos/group/v1/tx.proto new file mode 100644 index 00000000..9fb0caa1 --- /dev/null +++ b/packages/codegen/proto/cosmos/group/v1/tx.proto @@ -0,0 +1,364 @@ +syntax = "proto3"; + +package cosmos.group.v1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/group"; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "google/protobuf/any.proto"; +import "cosmos/group/v1/types.proto"; + +import "cosmos/msg/v1/msg.proto"; + +// Msg is the cosmos.group.v1 Msg service. +service Msg { + + // CreateGroup creates a new group with an admin account address, a list of members and some optional metadata. + rpc CreateGroup(MsgCreateGroup) returns (MsgCreateGroupResponse); + + // UpdateGroupMembers updates the group members with given group id and admin address. + rpc UpdateGroupMembers(MsgUpdateGroupMembers) returns (MsgUpdateGroupMembersResponse); + + // UpdateGroupAdmin updates the group admin with given group id and previous admin address. 
+ rpc UpdateGroupAdmin(MsgUpdateGroupAdmin) returns (MsgUpdateGroupAdminResponse); + + // UpdateGroupMetadata updates the group metadata with given group id and admin address. + rpc UpdateGroupMetadata(MsgUpdateGroupMetadata) returns (MsgUpdateGroupMetadataResponse); + + // CreateGroupPolicy creates a new group policy using given DecisionPolicy. + rpc CreateGroupPolicy(MsgCreateGroupPolicy) returns (MsgCreateGroupPolicyResponse); + + // CreateGroupWithPolicy creates a new group with policy. + rpc CreateGroupWithPolicy(MsgCreateGroupWithPolicy) returns (MsgCreateGroupWithPolicyResponse); + + // UpdateGroupPolicyAdmin updates a group policy admin. + rpc UpdateGroupPolicyAdmin(MsgUpdateGroupPolicyAdmin) returns (MsgUpdateGroupPolicyAdminResponse); + + // UpdateGroupPolicyDecisionPolicy allows a group policy's decision policy to be updated. + rpc UpdateGroupPolicyDecisionPolicy(MsgUpdateGroupPolicyDecisionPolicy) + returns (MsgUpdateGroupPolicyDecisionPolicyResponse); + + // UpdateGroupPolicyMetadata updates a group policy metadata. + rpc UpdateGroupPolicyMetadata(MsgUpdateGroupPolicyMetadata) returns (MsgUpdateGroupPolicyMetadataResponse); + + // SubmitProposal submits a new proposal. + rpc SubmitProposal(MsgSubmitProposal) returns (MsgSubmitProposalResponse); + + // WithdrawProposal aborts a proposal. + rpc WithdrawProposal(MsgWithdrawProposal) returns (MsgWithdrawProposalResponse); + + // Vote allows a voter to vote on a proposal. + rpc Vote(MsgVote) returns (MsgVoteResponse); + + // Exec executes a proposal. + rpc Exec(MsgExec) returns (MsgExecResponse); + + // LeaveGroup allows a group member to leave the group. + rpc LeaveGroup(MsgLeaveGroup) returns (MsgLeaveGroupResponse); +} + +// +// Groups +// + +// MsgCreateGroup is the Msg/CreateGroup request type. +message MsgCreateGroup { + option (cosmos.msg.v1.signer) = "admin"; + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // members defines the group members. + repeated Member members = 2 [(gogoproto.nullable) = false]; + + // metadata is any arbitrary metadata to attached to the group. + string metadata = 3; +} + +// MsgCreateGroupResponse is the Msg/CreateGroup response type. +message MsgCreateGroupResponse { + + // group_id is the unique ID of the newly created group. + uint64 group_id = 1; +} + +// MsgUpdateGroupMembers is the Msg/UpdateGroupMembers request type. +message MsgUpdateGroupMembers { + option (cosmos.msg.v1.signer) = "admin"; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; + + // member_updates is the list of members to update, + // set weight to 0 to remove a member. + repeated Member member_updates = 3 [(gogoproto.nullable) = false]; +} + +// MsgUpdateGroupMembersResponse is the Msg/UpdateGroupMembers response type. +message MsgUpdateGroupMembersResponse {} + +// MsgUpdateGroupAdmin is the Msg/UpdateGroupAdmin request type. +message MsgUpdateGroupAdmin { + option (cosmos.msg.v1.signer) = "admin"; + + // admin is the current account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; + + // new_admin is the group new admin account address. 
+ string new_admin = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgUpdateGroupAdminResponse is the Msg/UpdateGroupAdmin response type. +message MsgUpdateGroupAdminResponse {} + +// MsgUpdateGroupMetadata is the Msg/UpdateGroupMetadata request type. +message MsgUpdateGroupMetadata { + option (cosmos.msg.v1.signer) = "admin"; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; + + // metadata is the updated group's metadata. + string metadata = 3; +} + +// MsgUpdateGroupMetadataResponse is the Msg/UpdateGroupMetadata response type. +message MsgUpdateGroupMetadataResponse {} + +// +// Group Policies +// + +// MsgCreateGroupPolicy is the Msg/CreateGroupPolicy request type. +message MsgCreateGroupPolicy { + option (cosmos.msg.v1.signer) = "admin"; + + option (gogoproto.goproto_getters) = false; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; + + // metadata is any arbitrary metadata attached to the group policy. + string metadata = 3; + + // decision_policy specifies the group policy's decision policy. + google.protobuf.Any decision_policy = 4 [(cosmos_proto.accepts_interface) = "cosmos.group.DecisionPolicy"]; +} + +// MsgCreateGroupPolicyResponse is the Msg/CreateGroupPolicy response type. +message MsgCreateGroupPolicyResponse { + + // address is the account address of the newly created group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgUpdateGroupPolicyAdmin is the Msg/UpdateGroupPolicyAdmin request type. +message MsgUpdateGroupPolicyAdmin { + option (cosmos.msg.v1.signer) = "admin"; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // address is the account address of the group policy. + string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // new_admin is the new group policy admin. + string new_admin = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgCreateGroupWithPolicy is the Msg/CreateGroupWithPolicy request type. +message MsgCreateGroupWithPolicy { + option (gogoproto.goproto_getters) = false; + + // admin is the account address of the group and group policy admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // members defines the group members. + repeated Member members = 2 [(gogoproto.nullable) = false]; + + // group_metadata is any arbitrary metadata attached to the group. + string group_metadata = 3; + + // group_policy_metadata is any arbitrary metadata attached to the group policy. + string group_policy_metadata = 4; + + // group_policy_as_admin is a boolean field, if set to true, the group policy account address will be used as group and group policy admin. + bool group_policy_as_admin = 5; + + // decision_policy specifies the group policy's decision policy. + google.protobuf.Any decision_policy = 6 [(cosmos_proto.accepts_interface) = "cosmos.group.DecisionPolicy"]; +} + +// MsgCreateGroupWithPolicyResponse is the Msg/CreateGroupWithPolicy response type. +message MsgCreateGroupWithPolicyResponse { + + // group_id is the unique ID of the newly created group with policy. + uint64 group_id = 1; + + // group_policy_address is the account address of the newly created group policy. 
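For a concrete reading of MsgCreateGroupWithPolicy above: decision_policy is a google.protobuf.Any that must carry a message implementing cosmos.group.DecisionPolicy, such as the ThresholdDecisionPolicy defined further down in types.proto. A sketch of the proto3 JSON shape of such a message (every concrete value is illustrative only, and the "@type" URL follows the Cosmos Any convention):

// Illustrative only: addresses, threshold and durations are placeholders.
const msgCreateGroupWithPolicy = {
  admin: "cosmos1...",                      // group and group policy admin (placeholder)
  members: [
    { address: "cosmos1...", weight: "1", metadata: "" },
  ],
  group_metadata: "",
  group_policy_metadata: "",
  group_policy_as_admin: false,
  decision_policy: {
    "@type": "/cosmos.group.v1.ThresholdDecisionPolicy",
    threshold: "1",
    windows: {
      voting_period: "86400s",              // google.protobuf.Duration in JSON form
      min_execution_period: "0s",
    },
  },
};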
+ string group_policy_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgUpdateGroupPolicyAdminResponse is the Msg/UpdateGroupPolicyAdmin response type. +message MsgUpdateGroupPolicyAdminResponse {} + +// MsgUpdateGroupPolicyDecisionPolicy is the Msg/UpdateGroupPolicyDecisionPolicy request type. +message MsgUpdateGroupPolicyDecisionPolicy { + option (cosmos.msg.v1.signer) = "admin"; + + option (gogoproto.goproto_getters) = false; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // address is the account address of group policy. + string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // decision_policy is the updated group policy's decision policy. + google.protobuf.Any decision_policy = 3 [(cosmos_proto.accepts_interface) = "cosmos.group.DecisionPolicy"]; +} + +// MsgUpdateGroupPolicyDecisionPolicyResponse is the Msg/UpdateGroupPolicyDecisionPolicy response type. +message MsgUpdateGroupPolicyDecisionPolicyResponse {} + +// MsgUpdateGroupPolicyMetadata is the Msg/UpdateGroupPolicyMetadata request type. +message MsgUpdateGroupPolicyMetadata { + option (cosmos.msg.v1.signer) = "admin"; + + // admin is the account address of the group admin. + string admin = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // address is the account address of group policy. + string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // metadata is the updated group policy metadata. + string metadata = 3; +} + +// MsgUpdateGroupPolicyMetadataResponse is the Msg/UpdateGroupPolicyMetadata response type. +message MsgUpdateGroupPolicyMetadataResponse {} + +// +// Proposals and Voting +// + +// Exec defines modes of execution of a proposal on creation or on new vote. +enum Exec { + + // An empty value means that there should be a separate + // MsgExec request for the proposal to execute. + EXEC_UNSPECIFIED = 0; + + // Try to execute the proposal immediately. + // If the proposal is not allowed per the DecisionPolicy, + // the proposal will still be open and could + // be executed at a later point. + EXEC_TRY = 1; +} + +// MsgSubmitProposal is the Msg/SubmitProposal request type. +message MsgSubmitProposal { + option (cosmos.msg.v1.signer) = "proposers"; + + option (gogoproto.goproto_getters) = false; + + // address is the account address of group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // proposers are the account addresses of the proposers. + // Proposers signatures will be counted as yes votes. + repeated string proposers = 2; + + // metadata is any arbitrary metadata to attached to the proposal. + string metadata = 3; + + // messages is a list of `sdk.Msg`s that will be executed if the proposal passes. + repeated google.protobuf.Any messages = 4; + + // exec defines the mode of execution of the proposal, + // whether it should be executed immediately on creation or not. + // If so, proposers signatures are considered as Yes votes. + Exec exec = 5; +} + +// MsgSubmitProposalResponse is the Msg/SubmitProposal response type. +message MsgSubmitProposalResponse { + + // proposal is the unique ID of the proposal. + uint64 proposal_id = 1; +} + +// MsgWithdrawProposal is the Msg/WithdrawProposal request type. +message MsgWithdrawProposal { + // proposal is the unique ID of the proposal. + uint64 proposal_id = 1; + + // address is the admin of the group policy or one of the proposer of the proposal. 
+ string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgWithdrawProposalResponse is the Msg/WithdrawProposal response type. +message MsgWithdrawProposalResponse {} + +// MsgVote is the Msg/Vote request type. +message MsgVote { + option (cosmos.msg.v1.signer) = "voter"; + + // proposal is the unique ID of the proposal. + uint64 proposal_id = 1; + // voter is the voter account address. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // option is the voter's choice on the proposal. + VoteOption option = 3; + + // metadata is any arbitrary metadata to attached to the vote. + string metadata = 4; + + // exec defines whether the proposal should be executed + // immediately after voting or not. + Exec exec = 5; +} + +// MsgVoteResponse is the Msg/Vote response type. +message MsgVoteResponse {} + +// MsgExec is the Msg/Exec request type. +message MsgExec { + option (cosmos.msg.v1.signer) = "signer"; + + // proposal is the unique ID of the proposal. + uint64 proposal_id = 1; + + // signer is the account address used to execute the proposal. + string signer = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgExecResponse is the Msg/Exec request type. +message MsgExecResponse {} + +// MsgLeaveGroup is the Msg/LeaveGroup request type. +message MsgLeaveGroup { + option (cosmos.msg.v1.signer) = "address"; + + // address is the account address of the group member. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; +} + +// MsgLeaveGroupResponse is the Msg/LeaveGroup response type. +message MsgLeaveGroupResponse {} diff --git a/packages/codegen/proto/cosmos/group/v1/types.proto b/packages/codegen/proto/cosmos/group/v1/types.proto new file mode 100644 index 00000000..604fe0ae --- /dev/null +++ b/packages/codegen/proto/cosmos/group/v1/types.proto @@ -0,0 +1,308 @@ +syntax = "proto3"; + +package cosmos.group.v1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/group"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "cosmos_proto/cosmos.proto"; +import "google/protobuf/any.proto"; + +// Member represents a group member with an account address, +// non-zero weight and metadata. +message Member { + + // address is the member's account address. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // weight is the member's voting weight that should be greater than 0. + string weight = 2; + + // metadata is any arbitrary metadata to attached to the member. + string metadata = 3; + + // added_at is a timestamp specifying when a member was added. + google.protobuf.Timestamp added_at = 4 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// Members defines a repeated slice of Member objects. +message Members { + + // members is the list of members. + repeated Member members = 1 [(gogoproto.nullable) = false]; +} + +// ThresholdDecisionPolicy implements the DecisionPolicy interface +message ThresholdDecisionPolicy { + option (cosmos_proto.implements_interface) = "cosmos.group.DecisionPolicy"; + + // threshold is the minimum weighted sum of yes votes that must be met or exceeded for a proposal to succeed. + string threshold = 1; + + // windows defines the different windows for voting and execution. 
+ DecisionPolicyWindows windows = 2; +} + +// PercentageDecisionPolicy implements the DecisionPolicy interface +message PercentageDecisionPolicy { + option (cosmos_proto.implements_interface) = "cosmos.group.DecisionPolicy"; + + // percentage is the minimum percentage the weighted sum of yes votes must meet for a proposal to succeed. + string percentage = 1; + + // windows defines the different windows for voting and execution. + DecisionPolicyWindows windows = 2; +} + +// DecisionPolicyWindows defines the different windows for voting and execution. +message DecisionPolicyWindows { + // voting_period is the duration from submission of a proposal to the end of voting period + // Within this times votes can be submitted with MsgVote. + google.protobuf.Duration voting_period = 1 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; + + // min_execution_period is the minimum duration after the proposal submission + // where members can start sending MsgExec. This means that the window for + // sending a MsgExec transaction is: + // `[ submission + min_execution_period ; submission + voting_period + max_execution_period]` + // where max_execution_period is a app-specific config, defined in the keeper. + // If not set, min_execution_period will default to 0. + // + // Please make sure to set a `min_execution_period` that is smaller than + // `voting_period + max_execution_period`, or else the above execution window + // is empty, meaning that all proposals created with this decision policy + // won't be able to be executed. + google.protobuf.Duration min_execution_period = 2 [(gogoproto.stdduration) = true, (gogoproto.nullable) = false]; +} + +// VoteOption enumerates the valid vote options for a given proposal. +enum VoteOption { + option (gogoproto.goproto_enum_prefix) = false; + + // VOTE_OPTION_UNSPECIFIED defines a no-op vote option. + VOTE_OPTION_UNSPECIFIED = 0; + // VOTE_OPTION_YES defines a yes vote option. + VOTE_OPTION_YES = 1; + // VOTE_OPTION_ABSTAIN defines an abstain vote option. + VOTE_OPTION_ABSTAIN = 2; + // VOTE_OPTION_NO defines a no vote option. + VOTE_OPTION_NO = 3; + // VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. + VOTE_OPTION_NO_WITH_VETO = 4; +} + +// +// State +// + +// GroupInfo represents the high-level on-chain information for a group. +message GroupInfo { + + // id is the unique ID of the group. + uint64 id = 1; + + // admin is the account address of the group's admin. + string admin = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // metadata is any arbitrary metadata to attached to the group. + string metadata = 3; + + // version is used to track changes to a group's membership structure that + // would break existing proposals. Whenever any members weight is changed, + // or any member is added or removed this version is incremented and will + // cause proposals based on older versions of this group to fail + uint64 version = 4; + + // total_weight is the sum of the group members' weights. + string total_weight = 5; + + // created_at is a timestamp specifying when a group was created. + google.protobuf.Timestamp created_at = 6 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// GroupMember represents the relationship between a group and a member. +message GroupMember { + + // group_id is the unique ID of the group. + uint64 group_id = 1; + + // member is the member data. + Member member = 2; +} + +// GroupPolicyInfo represents the high-level on-chain information for a group policy. 
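The min_execution_period comment above pins the MsgExec window to [submission + min_execution_period, submission + voting_period + max_execution_period]. A small sketch of that check; max_execution_period is app-specific keeper configuration per the comment, so it is simply a parameter here:

// All arguments are millisecond timestamps/durations.
function isInExecutionWindow(
  now: number,
  submission: number,
  votingPeriod: number,
  minExecutionPeriod: number,
  maxExecutionPeriod: number // app-specific, defined in the keeper per the comment above
): boolean {
  const opens = submission + minExecutionPeriod;
  const closes = submission + votingPeriod + maxExecutionPeriod;
  return now >= opens && now <= closes;
}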
+message GroupPolicyInfo { + option (gogoproto.equal) = true; + option (gogoproto.goproto_getters) = false; + + // address is the account address of group policy. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // group_id is the unique ID of the group. + uint64 group_id = 2; + + // admin is the account address of the group admin. + string admin = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // metadata is any arbitrary metadata to attached to the group policy. + string metadata = 4; + + // version is used to track changes to a group's GroupPolicyInfo structure that + // would create a different result on a running proposal. + uint64 version = 5; + + // decision_policy specifies the group policy's decision policy. + google.protobuf.Any decision_policy = 6 [(cosmos_proto.accepts_interface) = "cosmos.group.DecisionPolicy"]; + + // created_at is a timestamp specifying when a group policy was created. + google.protobuf.Timestamp created_at = 7 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// Proposal defines a group proposal. Any member of a group can submit a proposal +// for a group policy to decide upon. +// A proposal consists of a set of `sdk.Msg`s that will be executed if the proposal +// passes as well as some optional metadata associated with the proposal. +message Proposal { + option (gogoproto.goproto_getters) = false; + + // id is the unique id of the proposal. + uint64 id = 1; + + // address is the account address of group policy. + string address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // metadata is any arbitrary metadata to attached to the proposal. + string metadata = 3; + + // proposers are the account addresses of the proposers. + repeated string proposers = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // submit_time is a timestamp specifying when a proposal was submitted. + google.protobuf.Timestamp submit_time = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + + // group_version tracks the version of the group that this proposal corresponds to. + // When group membership is changed, existing proposals from previous group versions will become invalid. + uint64 group_version = 6; + + // group_policy_version tracks the version of the group policy that this proposal corresponds to. + // When a decision policy is changed, existing proposals from previous policy versions will become invalid. + uint64 group_policy_version = 7; + + // status represents the high level position in the life cycle of the proposal. Initial value is Submitted. + ProposalStatus status = 8; + + // result is the final result based on the votes and election rule. Initial value is unfinalized. + // The result is persisted so that clients can always rely on this state and not have to replicate the logic. + ProposalResult result = 9; + + // final_tally_result contains the sums of all weighted votes for this + // proposal for each vote option, after tallying. When querying a proposal + // via gRPC, this field is not populated until the proposal's voting period + // has ended. + TallyResult final_tally_result = 10 [(gogoproto.nullable) = false]; + + // voting_period_end is the timestamp before which voting must be done. + // Unless a successfull MsgExec is called before (to execute a proposal whose + // tally is successful before the voting period ends), tallying will be done + // at this point, and the `final_tally_result`, as well + // as `status` and `result` fields will be accordingly updated. 
+ google.protobuf.Timestamp voting_period_end = 11 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + + // executor_result is the final result based on the votes and election rule. Initial value is NotRun. + ProposalExecutorResult executor_result = 12; + + // messages is a list of Msgs that will be executed if the proposal passes. + repeated google.protobuf.Any messages = 13; +} + +// ProposalStatus defines proposal statuses. +enum ProposalStatus { + option (gogoproto.goproto_enum_prefix) = false; + + // An empty value is invalid and not allowed. + PROPOSAL_STATUS_UNSPECIFIED = 0; + + // Initial status of a proposal when persisted. + PROPOSAL_STATUS_SUBMITTED = 1; + + // Final status of a proposal when the final tally was executed. + PROPOSAL_STATUS_CLOSED = 2; + + // Final status of a proposal when the group was modified before the final tally. + PROPOSAL_STATUS_ABORTED = 3; + + // A proposal can be deleted before the voting start time by the owner. When this happens the final status + // is Withdrawn. + PROPOSAL_STATUS_WITHDRAWN = 4; +} + +// ProposalResult defines types of proposal results. +enum ProposalResult { + option (gogoproto.goproto_enum_prefix) = false; + + // An empty value is invalid and not allowed + PROPOSAL_RESULT_UNSPECIFIED = 0; + + // Until a final tally has happened the status is unfinalized + PROPOSAL_RESULT_UNFINALIZED = 1; + + // Final result of the tally + PROPOSAL_RESULT_ACCEPTED = 2; + + // Final result of the tally + PROPOSAL_RESULT_REJECTED = 3; +} + +// ProposalExecutorResult defines types of proposal executor results. +enum ProposalExecutorResult { + option (gogoproto.goproto_enum_prefix) = false; + + // An empty value is not allowed. + PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED = 0; + + // We have not yet run the executor. + PROPOSAL_EXECUTOR_RESULT_NOT_RUN = 1; + + // The executor was successful and proposed action updated state. + PROPOSAL_EXECUTOR_RESULT_SUCCESS = 2; + + // The executor returned an error and proposed action didn't update state. + PROPOSAL_EXECUTOR_RESULT_FAILURE = 3; +} + +// TallyResult represents the sum of weighted votes for each vote option. +message TallyResult { + option (gogoproto.goproto_getters) = false; + + // yes_count is the weighted sum of yes votes. + string yes_count = 1; + + // abstain_count is the weighted sum of abstainers. + string abstain_count = 2; + + // no is the weighted sum of no votes. + string no_count = 3; + + // no_with_veto_count is the weighted sum of veto. + string no_with_veto_count = 4; +} + +// Vote represents a vote for a proposal. +message Vote { + + // proposal is the unique ID of the proposal. + uint64 proposal_id = 1; + + // voter is the account address of the voter. + string voter = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // option is the voter's choice on the proposal. + VoteOption option = 3; + + // metadata is any arbitrary metadata to attached to the vote. + string metadata = 4; + + // submit_time is the timestamp when the vote was submitted. 
+ google.protobuf.Timestamp submit_time = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} diff --git a/packages/codegen/proto/cosmos/mint/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/mint/v1beta1/genesis.proto new file mode 100644 index 00000000..4e783fb5 --- /dev/null +++ b/packages/codegen/proto/cosmos/mint/v1beta1/genesis.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; +package cosmos.mint.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/mint/v1beta1/mint.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/mint/types"; + +// GenesisState defines the mint module's genesis state. +message GenesisState { + // minter is a space for holding current inflation information. + Minter minter = 1 [(gogoproto.nullable) = false]; + + // params defines all the paramaters of the module. + Params params = 2 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/mint/v1beta1/mint.proto b/packages/codegen/proto/cosmos/mint/v1beta1/mint.proto new file mode 100644 index 00000000..9cfe2b76 --- /dev/null +++ b/packages/codegen/proto/cosmos/mint/v1beta1/mint.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; +package cosmos.mint.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/mint/types"; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; + +// Minter represents the minting state. +message Minter { + // current annual inflation rate + string inflation = 1 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // current annual expected provisions + string annual_provisions = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// Params holds parameters for the mint module. +message Params { + option (gogoproto.goproto_stringer) = false; + + // type of coin to mint + string mint_denom = 1; + // maximum annual change in inflation rate + string inflation_rate_change = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // maximum inflation rate + string inflation_max = 3 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // minimum inflation rate + string inflation_min = 4 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // goal of percent bonded atoms + string goal_bonded = 5 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // expected blocks per year + uint64 blocks_per_year = 6; +} diff --git a/packages/codegen/proto/cosmos/mint/v1beta1/query.proto b/packages/codegen/proto/cosmos/mint/v1beta1/query.proto new file mode 100644 index 00000000..acd341d7 --- /dev/null +++ b/packages/codegen/proto/cosmos/mint/v1beta1/query.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; +package cosmos.mint.v1beta1; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/mint/v1beta1/mint.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/mint/types"; + +// Query provides defines the gRPC querier service. +service Query { + // Params returns the total set of minting parameters. 
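Minter.annual_provisions above is a Dec custom type, so clients receive it as a decimal string, while Params.blocks_per_year is a plain integer. A rough client-side estimate of the provision minted per block; dividing the two is the conventional derivation and is assumed here rather than stated in these files:

// annualProvisions arrives as a decimal string, e.g. "365000000.000000000000000000".
function estimateBlockProvision(annualProvisions: string, blocksPerYear: number): number {
  // Number() keeps only ~15 significant digits, which is fine for a display-level estimate.
  return Number(annualProvisions) / blocksPerYear;
}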
+ rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/mint/v1beta1/params"; + } + + // Inflation returns the current minting inflation value. + rpc Inflation(QueryInflationRequest) returns (QueryInflationResponse) { + option (google.api.http).get = "/cosmos/mint/v1beta1/inflation"; + } + + // AnnualProvisions current minting annual provisions value. + rpc AnnualProvisions(QueryAnnualProvisionsRequest) returns (QueryAnnualProvisionsResponse) { + option (google.api.http).get = "/cosmos/mint/v1beta1/annual_provisions"; + } +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryInflationRequest is the request type for the Query/Inflation RPC method. +message QueryInflationRequest {} + +// QueryInflationResponse is the response type for the Query/Inflation RPC +// method. +message QueryInflationResponse { + // inflation is the current minting inflation value. + bytes inflation = 1 [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", (gogoproto.nullable) = false]; +} + +// QueryAnnualProvisionsRequest is the request type for the +// Query/AnnualProvisions RPC method. +message QueryAnnualProvisionsRequest {} + +// QueryAnnualProvisionsResponse is the response type for the +// Query/AnnualProvisions RPC method. +message QueryAnnualProvisionsResponse { + // annual_provisions is the current minting annual provisions value. + bytes annual_provisions = 1 + [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", (gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/msg/v1/msg.proto b/packages/codegen/proto/cosmos/msg/v1/msg.proto new file mode 100644 index 00000000..89bdf312 --- /dev/null +++ b/packages/codegen/proto/cosmos/msg/v1/msg.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package cosmos.msg.v1; + +import "google/protobuf/descriptor.proto"; + +// TODO(fdymylja): once we fully migrate to protov2 the go_package needs to be updated. +// We need this right now because gogoproto codegen needs to import the extension. +option go_package = "github.com/cosmos/cosmos-sdk/types/msgservice"; + +extend google.protobuf.MessageOptions { + // signer must be used in cosmos messages in order + // to signal to external clients which fields in a + // given cosmos message must be filled with signer + // information (address). + // The field must be the protobuf name of the message + // field extended with this MessageOption. + // The field must either be of string kind, or of message + // kind in case the signer information is contained within + // a message inside the cosmos message. 
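The comment above says the option value names the message field(s) that carry signer information. A sketch of how a client could use that mapping once it has been extracted from the message descriptors; the table below is hard-coded for illustration and the type URLs are examples taken from the messages in this diff:

// Hypothetical, hard-coded signer-field table; in practice it would be derived from descriptors.
const signerFields: Record<string, string[]> = {
  "/cosmos.gov.v1beta1.MsgVote": ["voter"],
  "/cosmos.group.v1.MsgSubmitProposal": ["proposers"],
};

// Resolve the signer address(es) of a decoded message given its type URL.
function signersOf(typeUrl: string, msg: Record<string, unknown>): string[] {
  return (signerFields[typeUrl] ?? []).flatMap((field) => {
    const value = msg[field];
    if (Array.isArray(value)) return value as string[];
    return typeof value === "string" ? [value] : [];
  });
}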
+ repeated string signer = 11110000; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/nft/v1beta1/event.proto b/packages/codegen/proto/cosmos/nft/v1beta1/event.proto new file mode 100644 index 00000000..96964f08 --- /dev/null +++ b/packages/codegen/proto/cosmos/nft/v1beta1/event.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package cosmos.nft.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/nft"; + +// EventSend is emitted on Msg/Send +message EventSend { + string class_id = 1; + string id = 2; + string sender = 3; + string receiver = 4; +} + +// EventMint is emitted on Mint +message EventMint { + string class_id = 1; + string id = 2; + string owner = 3; +} + +// EventBurn is emitted on Burn +message EventBurn { + string class_id = 1; + string id = 2; + string owner = 3; +} diff --git a/packages/codegen/proto/cosmos/nft/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/nft/v1beta1/genesis.proto new file mode 100644 index 00000000..6f36ed34 --- /dev/null +++ b/packages/codegen/proto/cosmos/nft/v1beta1/genesis.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package cosmos.nft.v1beta1; + +import "cosmos/nft/v1beta1/nft.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/nft"; + +// GenesisState defines the nft module's genesis state. +message GenesisState { + // class defines the class of the nft type. + repeated cosmos.nft.v1beta1.Class classes = 1; + repeated Entry entries = 2; +} + +// Entry Defines all nft owned by a person +message Entry { + // owner is the owner address of the following nft + string owner = 1; + + // nfts is a group of nfts of the same owner + repeated cosmos.nft.v1beta1.NFT nfts = 2; +} diff --git a/packages/codegen/proto/cosmos/nft/v1beta1/nft.proto b/packages/codegen/proto/cosmos/nft/v1beta1/nft.proto new file mode 100644 index 00000000..b1241260 --- /dev/null +++ b/packages/codegen/proto/cosmos/nft/v1beta1/nft.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; +package cosmos.nft.v1beta1; + +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/nft"; + +// Class defines the class of the nft type. +message Class { + // id defines the unique identifier of the NFT classification, similar to the contract address of ERC721 + string id = 1; + + // name defines the human-readable name of the NFT classification. Optional + string name = 2; + + // symbol is an abbreviated name for nft classification. Optional + string symbol = 3; + + // description is a brief description of nft classification. Optional + string description = 4; + + // uri for the class metadata stored off chain. It can define schema for Class and NFT `Data` attributes. Optional + string uri = 5; + + // uri_hash is a hash of the document pointed by uri. Optional + string uri_hash = 6; + + // data is the app specific metadata of the NFT class. Optional + google.protobuf.Any data = 7; +} + +// NFT defines the NFT. +message NFT { + // class_id associated with the NFT, similar to the contract address of ERC721 + string class_id = 1; + + // id is a unique identifier of the NFT + string id = 2; + + // uri for the NFT metadata stored off chain + string uri = 3; + + // uri_hash is a hash of the document pointed by uri + string uri_hash = 4; + + // data is an app specific data of the NFT. 
Optional + google.protobuf.Any data = 10; +} diff --git a/packages/codegen/proto/cosmos/nft/v1beta1/query.proto b/packages/codegen/proto/cosmos/nft/v1beta1/query.proto new file mode 100644 index 00000000..c1d8070f --- /dev/null +++ b/packages/codegen/proto/cosmos/nft/v1beta1/query.proto @@ -0,0 +1,125 @@ +syntax = "proto3"; +package cosmos.nft.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +import "cosmos/nft/v1beta1/nft.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/nft"; + +// Query defines the gRPC querier service. +service Query { + // Balance queries the number of NFTs of a given class owned by the owner, same as balanceOf in ERC721 + rpc Balance(QueryBalanceRequest) returns (QueryBalanceResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/balance/{owner}/{class_id}"; + } + + // Owner queries the owner of the NFT based on its class and id, same as ownerOf in ERC721 + rpc Owner(QueryOwnerRequest) returns (QueryOwnerResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/owner/{class_id}/{id}"; + } + + // Supply queries the number of NFTs from the given class, same as totalSupply of ERC721. + rpc Supply(QuerySupplyRequest) returns (QuerySupplyResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/supply/{class_id}"; + } + + // NFTs queries all NFTs of a given class or owner,choose at least one of the two, similar to tokenByIndex in + // ERC721Enumerable + rpc NFTs(QueryNFTsRequest) returns (QueryNFTsResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/nfts"; + } + + // NFT queries an NFT based on its class and id. + rpc NFT(QueryNFTRequest) returns (QueryNFTResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/nfts/{class_id}/{id}"; + } + + // Class queries an NFT class based on its id + rpc Class(QueryClassRequest) returns (QueryClassResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/classes/{class_id}"; + } + + // Classes queries all NFT classes + rpc Classes(QueryClassesRequest) returns (QueryClassesResponse) { + option (google.api.http).get = "/cosmos/nft/v1beta1/classes"; + } +} + +// QueryBalanceRequest is the request type for the Query/Balance RPC method +message QueryBalanceRequest { + string class_id = 1; + string owner = 2; +} + +// QueryBalanceResponse is the response type for the Query/Balance RPC method +message QueryBalanceResponse { + uint64 amount = 1; +} + +// QueryOwnerRequest is the request type for the Query/Owner RPC method +message QueryOwnerRequest { + string class_id = 1; + string id = 2; +} + +// QueryOwnerResponse is the response type for the Query/Owner RPC method +message QueryOwnerResponse { + string owner = 1; +} + +// QuerySupplyRequest is the request type for the Query/Supply RPC method +message QuerySupplyRequest { + string class_id = 1; +} + +// QuerySupplyResponse is the response type for the Query/Supply RPC method +message QuerySupplyResponse { + uint64 amount = 1; +} + +// QueryNFTstRequest is the request type for the Query/NFTs RPC method +message QueryNFTsRequest { + string class_id = 1; + string owner = 2; + cosmos.base.query.v1beta1.PageRequest pagination = 3; +} + +// QueryNFTsResponse is the response type for the Query/NFTs RPC methods +message QueryNFTsResponse { + repeated cosmos.nft.v1beta1.NFT nfts = 1; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryNFTRequest is the request type for the Query/NFT RPC method +message QueryNFTRequest { + string class_id = 1; + string id = 
2; +} + +// QueryNFTResponse is the response type for the Query/NFT RPC method +message QueryNFTResponse { + cosmos.nft.v1beta1.NFT nft = 1; +} + +// QueryClassRequest is the request type for the Query/Class RPC method +message QueryClassRequest { + string class_id = 1; +} + +// QueryClassResponse is the response type for the Query/Class RPC method +message QueryClassResponse { + cosmos.nft.v1beta1.Class class = 1; +} + +// QueryClassesRequest is the request type for the Query/Classes RPC method +message QueryClassesRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryClassesResponse is the response type for the Query/Classes RPC method +message QueryClassesResponse { + repeated cosmos.nft.v1beta1.Class classes = 1; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/nft/v1beta1/tx.proto b/packages/codegen/proto/cosmos/nft/v1beta1/tx.proto new file mode 100644 index 00000000..95b402ce --- /dev/null +++ b/packages/codegen/proto/cosmos/nft/v1beta1/tx.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +package cosmos.nft.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/nft"; + +import "cosmos/msg/v1/msg.proto"; + +// Msg defines the nft Msg service. +service Msg { + // Send defines a method to send a nft from one account to another account. + rpc Send(MsgSend) returns (MsgSendResponse); +} +// MsgSend represents a message to send a nft from one account to another account. +message MsgSend { + option (cosmos.msg.v1.signer) = "sender"; + + // class_id defines the unique identifier of the nft classification, similar to the contract address of ERC721 + string class_id = 1; + + // id defines the unique identification of nft + string id = 2; + + // sender is the address of the owner of nft + string sender = 3; + + // receiver is the receiver address of nft + string receiver = 4; +} +// MsgSendResponse defines the Msg/Send response type. +message MsgSendResponse {} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/orm/v1/orm.proto b/packages/codegen/proto/cosmos/orm/v1/orm.proto new file mode 100644 index 00000000..abfbbd4f --- /dev/null +++ b/packages/codegen/proto/cosmos/orm/v1/orm.proto @@ -0,0 +1,104 @@ +syntax = "proto3"; + +package cosmos.orm.v1; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.MessageOptions { + + // table specifies that this message will be used as an ORM table. It cannot + // be used together with the singleton option. + TableDescriptor table = 104503790; + + // singleton specifies that this message will be used as an ORM singleton. It cannot + // be used together with the table option. + SingletonDescriptor singleton = 104503791; +} + +// TableDescriptor describes an ORM table. +message TableDescriptor { + + // primary_key defines the primary key for the table. + PrimaryKeyDescriptor primary_key = 1; + + // index defines one or more secondary indexes. + repeated SecondaryIndexDescriptor index = 2; + + // id is a non-zero integer ID that must be unique within the + // tables and singletons in this file. It may be deprecated in the future when this + // can be auto-generated. + uint32 id = 3; +} + +// PrimaryKeyDescriptor describes a table primary key. +message PrimaryKeyDescriptor { + + // fields is a comma-separated list of fields in the primary key. Spaces are + // not allowed. Supported field types, their encodings, and any applicable constraints + // are described below. 
+ // - uint32 are encoded as 2,3,4 or 5 bytes using a compact encoding that + // is suitable for sorted iteration (not varint encoding). This type is + // well-suited for small integers. + // - uint64 are encoded as 2,4,6 or 9 bytes using a compact encoding that + // is suitable for sorted iteration (not varint encoding). This type is + // well-suited for small integers such as auto-incrementing sequences. + // - fixed32, fixed64 are encoded as big-endian fixed width bytes and support + // sorted iteration. These types are well-suited for encoding fixed width + // decimals as integers. + // - strings are encoded as raw bytes in terminal key segments and null-terminated + // in non-terminal segments. Null characters are thus forbidden in strings. + // string fields support sorted iteration. + // - bytes are encoded as raw bytes in terminal segments and length-prefixed + // with a 32-bit unsigned varint in non-terminal segments. + // - int32, sint32, int64, sint64, sfixed32, sfixed64 are encoded as fixed width bytes with + // an encoding that enables sorted iteration. + // - google.protobuf.Timestamp and google.protobuf.Duration are encoded + // as 12 bytes using an encoding that enables sorted iteration. + // - enum fields are encoded using varint encoding and do not support sorted + // iteration. + // - bool fields are encoded as a single byte 0 or 1. + // + // All other field types are unsupported in keys, including repeated and + // oneof fields. + // + // Primary keys are prefixed by the varint encoded table id and the byte 0x0 + // plus any additional prefix specified by the schema. + string fields = 1; + + // auto_increment specifies that the primary key is generated by an + // auto-incrementing integer. If this is set to true, fields must only + // contain one field that is of type uint64. + bool auto_increment = 2; +} + +// SecondaryIndexDescriptor describes a table secondary index. +message SecondaryIndexDescriptor { + + // fields is a comma-separated list of fields in the index. The supported + // field types are the same as those for PrimaryKeyDescriptor.fields. + // Index keys are prefixed by the varint encoded table id and the varint + // encoded index id plus any additional prefix specified by the schema. + // + // In addition to the field segments, non-unique index keys are suffixed with + // any additional primary key fields not present in the index fields so that the + // primary key can be reconstructed. Unique indexes instead of being suffixed + // store the remaining primary key fields in the value. + string fields = 1; + + // id is a non-zero integer ID that must be unique within the indexes for this + // table and less than 32768. It may be deprecated in the future when this can + // be auto-generated. + uint32 id = 2; + + // unique specifies that this is a unique index. + bool unique = 3; +} + +// SingletonDescriptor describes an ORM singleton table which has at most one instance. +message SingletonDescriptor { + + // id is a non-zero integer ID that must be unique within the + // tables and singletons in this file. It may be deprecated in the future when this + // can be auto-generated.
+ uint32 id = 1; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/orm/v1alpha1/schema.proto b/packages/codegen/proto/cosmos/orm/v1alpha1/schema.proto new file mode 100644 index 00000000..ab713340 --- /dev/null +++ b/packages/codegen/proto/cosmos/orm/v1alpha1/schema.proto @@ -0,0 +1,76 @@ +syntax = "proto3"; + +package cosmos.orm.v1alpha1; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.MessageOptions { + // module_schema is used to define the ORM schema for an app module. + // All module config messages that use module_schema must also declare + // themselves as app module config messages using the cosmos.app.v1.is_module + // option. + ModuleSchemaDescriptor module_schema = 104503792; +} + +// ModuleSchemaDescriptor describes a module's ORM schema. +message ModuleSchemaDescriptor { + repeated FileEntry schema_file = 1; + + // FileEntry describes an ORM file used in a module. + message FileEntry { + // id is a prefix that will be varint encoded and prepended to all the + // table keys specified in the file's tables. + uint32 id = 1; + + // proto_file_name is the name of a .proto file that contains + // table definitions. The .proto file must be in a package that the + // module has referenced using cosmos.app.v1.ModuleDescriptor.use_package. + string proto_file_name = 2; + + // storage_type optionally indicates the type of storage this file's + // tables should use. If it is left unspecified, the default KV-storage + // of the app will be used. + StorageType storage_type = 3; + } + + // prefix is an optional prefix that precedes all keys in this module's + // store. + bytes prefix = 2; +} + +// StorageType +enum StorageType { + // STORAGE_TYPE_DEFAULT_UNSPECIFIED indicates the persistent + // KV-storage where primary key entries are stored in merkle-tree + // backed commitment storage and indexes and seqs are stored in + // fast index storage. Note that the Cosmos SDK before store/v2alpha1 + // does not support this. + STORAGE_TYPE_DEFAULT_UNSPECIFIED = 0; + + // STORAGE_TYPE_MEMORY indicates in-memory storage that will be + // reloaded every time an app restarts. Tables with this type of storage + // will by default be ignored when importing and exporting a module's + // state from JSON. + STORAGE_TYPE_MEMORY = 1; + + // STORAGE_TYPE_TRANSIENT indicates transient storage that is reset + // at the end of every block. Tables with this type of storage + // will by default be ignored when importing and exporting a module's + // state from JSON. + STORAGE_TYPE_TRANSIENT = 2; + + // STORAGE_TYPE_INDEX indicates persistent storage which is not backed + // by a merkle-tree and won't affect the app hash. Note that the Cosmos SDK + // before store/v2alpha1 does not support this. + STORAGE_TYPE_INDEX = 3; + + // STORAGE_TYPE_COMMITMENT indicates persistent storage which is backed by + // a merkle-tree. With this type of storage, both primary and index keys + // will affect the app hash and this is generally less efficient + // than using STORAGE_TYPE_DEFAULT_UNSPECIFIED which separates index + // keys into index storage. Note that modules built with the + // Cosmos SDK before store/v2alpha1 must specify STORAGE_TYPE_COMMITMENT + // instead of STORAGE_TYPE_DEFAULT_UNSPECIFIED or STORAGE_TYPE_INDEX + // because this is the only type of persistent storage available.
+ STORAGE_TYPE_COMMITMENT = 4; +} diff --git a/packages/codegen/proto/cosmos/params/v1beta1/params.proto b/packages/codegen/proto/cosmos/params/v1beta1/params.proto new file mode 100644 index 00000000..e5aabfec --- /dev/null +++ b/packages/codegen/proto/cosmos/params/v1beta1/params.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; +package cosmos.params.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/params/types/proposal"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; + +// ParameterChangeProposal defines a proposal to change one or more parameters. +message ParameterChangeProposal { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; + repeated ParamChange changes = 3 [(gogoproto.nullable) = false]; +} + +// ParamChange defines an individual parameter change, for use in +// ParameterChangeProposal. +message ParamChange { + option (gogoproto.goproto_stringer) = false; + + string subspace = 1; + string key = 2; + string value = 3; +} diff --git a/packages/codegen/proto/cosmos/params/v1beta1/query.proto b/packages/codegen/proto/cosmos/params/v1beta1/query.proto new file mode 100644 index 00000000..3b1c9a76 --- /dev/null +++ b/packages/codegen/proto/cosmos/params/v1beta1/query.proto @@ -0,0 +1,54 @@ +syntax = "proto3"; +package cosmos.params.v1beta1; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/params/v1beta1/params.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/params/types/proposal"; + +// Query defines the gRPC querier service. +service Query { + // Params queries a specific parameter of a module, given its subspace and + // key. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/params/v1beta1/params"; + } + + // Subspaces queries for all registered subspaces and all keys for a subspace. + rpc Subspaces(QuerySubspacesRequest) returns (QuerySubspacesResponse) { + option (google.api.http).get = "/cosmos/params/v1beta1/subspaces"; + } +} + +// QueryParamsRequest is request type for the Query/Params RPC method. +message QueryParamsRequest { + // subspace defines the module to query the parameter for. + string subspace = 1; + + // key defines the key of the parameter in the subspace. + string key = 2; +} + +// QueryParamsResponse is response type for the Query/Params RPC method. +message QueryParamsResponse { + // param defines the queried parameter. + ParamChange param = 1 [(gogoproto.nullable) = false]; +} + +// QuerySubspacesRequest defines a request type for querying for all registered +// subspaces and all keys for a subspace. +message QuerySubspacesRequest {} + +// QuerySubspacesResponse defines the response types for querying for all +// registered subspaces and all keys for a subspace. +message QuerySubspacesResponse { + repeated Subspace subspaces = 1; +} + +// Subspace defines a parameter subspace name and all the keys that exist for +// the subspace. 
+message Subspace { + string subspace = 1; + repeated string keys = 2; +} diff --git a/packages/codegen/proto/cosmos/slashing/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/slashing/v1beta1/genesis.proto new file mode 100644 index 00000000..312d56aa --- /dev/null +++ b/packages/codegen/proto/cosmos/slashing/v1beta1/genesis.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; +package cosmos.slashing.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/slashing/types"; + +import "gogoproto/gogo.proto"; +import "cosmos/slashing/v1beta1/slashing.proto"; +import "cosmos_proto/cosmos.proto"; + +// GenesisState defines the slashing module's genesis state. +message GenesisState { + // params defines all the paramaters of related to deposit. + Params params = 1 [(gogoproto.nullable) = false]; + + // signing_infos represents a map between validator addresses and their + // signing infos. + repeated SigningInfo signing_infos = 2 [(gogoproto.nullable) = false]; + + // missed_blocks represents a map between validator addresses and their + // missed blocks. + repeated ValidatorMissedBlocks missed_blocks = 3 [(gogoproto.nullable) = false]; +} + +// SigningInfo stores validator signing info of corresponding address. +message SigningInfo { + // address is the validator address. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_signing_info represents the signing info of this validator. + ValidatorSigningInfo validator_signing_info = 2 [(gogoproto.nullable) = false]; +} + +// ValidatorMissedBlocks contains array of missed blocks of corresponding +// address. +message ValidatorMissedBlocks { + // address is the validator address. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // missed_blocks is an array of missed blocks by the validator. + repeated MissedBlock missed_blocks = 2 [(gogoproto.nullable) = false]; +} + +// MissedBlock contains height and missed status as boolean. +message MissedBlock { + // index is the height at which the block was missed. + int64 index = 1; + // missed is the missed status. 
+ bool missed = 2; +} diff --git a/packages/codegen/proto/cosmos/slashing/v1beta1/query.proto b/packages/codegen/proto/cosmos/slashing/v1beta1/query.proto new file mode 100644 index 00000000..f742c1f8 --- /dev/null +++ b/packages/codegen/proto/cosmos/slashing/v1beta1/query.proto @@ -0,0 +1,64 @@ +syntax = "proto3"; +package cosmos.slashing.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/slashing/v1beta1/slashing.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/slashing/types"; + +// Query provides defines the gRPC querier service +service Query { + // Params queries the parameters of slashing module + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/slashing/v1beta1/params"; + } + + // SigningInfo queries the signing info of given cons address + rpc SigningInfo(QuerySigningInfoRequest) returns (QuerySigningInfoResponse) { + option (google.api.http).get = "/cosmos/slashing/v1beta1/signing_infos/{cons_address}"; + } + + // SigningInfos queries signing info of all validators + rpc SigningInfos(QuerySigningInfosRequest) returns (QuerySigningInfosResponse) { + option (google.api.http).get = "/cosmos/slashing/v1beta1/signing_infos"; + } +} + +// QueryParamsRequest is the request type for the Query/Params RPC method +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method +message QueryParamsResponse { + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC +// method +message QuerySigningInfoRequest { + // cons_address is the address to query signing info of + string cons_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC +// method +message QuerySigningInfoResponse { + // val_signing_info is the signing info of requested val cons address + ValidatorSigningInfo val_signing_info = 1 [(gogoproto.nullable) = false]; +} + +// QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC +// method +message QuerySigningInfosRequest { + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QuerySigningInfosResponse is the response type for the Query/SigningInfos RPC +// method +message QuerySigningInfosResponse { + // info is the signing info of all validators + repeated cosmos.slashing.v1beta1.ValidatorSigningInfo info = 1 [(gogoproto.nullable) = false]; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/codegen/proto/cosmos/slashing/v1beta1/slashing.proto b/packages/codegen/proto/cosmos/slashing/v1beta1/slashing.proto new file mode 100644 index 00000000..0aa9f61f --- /dev/null +++ b/packages/codegen/proto/cosmos/slashing/v1beta1/slashing.proto @@ -0,0 +1,45 @@ +syntax = "proto3"; +package cosmos.slashing.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/slashing/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "cosmos_proto/cosmos.proto"; + +// ValidatorSigningInfo defines a validator's signing info for monitoring their +// liveness activity. 
+message ValidatorSigningInfo { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // Height at which validator was first a candidate OR was unjailed + int64 start_height = 2; + // Index which is incremented each time the validator was a bonded + // in a block and may have signed a precommit or not. This in conjunction with the + // `SignedBlocksWindow` param determines the index in the `MissedBlocksBitArray`. + int64 index_offset = 3; + // Timestamp until which the validator is jailed due to liveness downtime. + google.protobuf.Timestamp jailed_until = 4 [(gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + // Whether or not a validator has been tombstoned (killed out of validator set). It is set + // once the validator commits an equivocation or for any other configured misbehiavor. + bool tombstoned = 5; + // A counter kept to avoid unnecessary array reads. + // Note that `Sum(MissedBlocksBitArray)` always equals `MissedBlocksCounter`. + int64 missed_blocks_counter = 6; +} + +// Params represents the parameters used for by the slashing module. +message Params { + int64 signed_blocks_window = 1; + bytes min_signed_per_window = 2 + [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", (gogoproto.nullable) = false]; + google.protobuf.Duration downtime_jail_duration = 3 [(gogoproto.nullable) = false, (gogoproto.stdduration) = true]; + bytes slash_fraction_double_sign = 4 + [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", (gogoproto.nullable) = false]; + bytes slash_fraction_downtime = 5 + [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", (gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/slashing/v1beta1/tx.proto b/packages/codegen/proto/cosmos/slashing/v1beta1/tx.proto new file mode 100644 index 00000000..7c90304b --- /dev/null +++ b/packages/codegen/proto/cosmos/slashing/v1beta1/tx.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +package cosmos.slashing.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/slashing/types"; +option (gogoproto.equal_all) = true; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/msg/v1/msg.proto"; + +// Msg defines the slashing Msg service. +service Msg { + // Unjail defines a method for unjailing a jailed validator, thus returning + // them into the bonded validator set, so they can begin receiving provisions + // and rewards again. 
+ rpc Unjail(MsgUnjail) returns (MsgUnjailResponse); +} + +// MsgUnjail defines the Msg/Unjail request type +message MsgUnjail { + option (cosmos.msg.v1.signer) = "validator_addr"; + + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = true; + + string validator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString", (gogoproto.jsontag) = "address"]; +} + +// MsgUnjailResponse defines the Msg/Unjail response type +message MsgUnjailResponse {} diff --git a/packages/codegen/proto/cosmos/staking/v1beta1/authz.proto b/packages/codegen/proto/cosmos/staking/v1beta1/authz.proto new file mode 100644 index 00000000..981da1db --- /dev/null +++ b/packages/codegen/proto/cosmos/staking/v1beta1/authz.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; +package cosmos.staking.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; + +// StakeAuthorization defines authorization for delegate/undelegate/redelegate. +// +// Since: cosmos-sdk 0.43 +message StakeAuthorization { + option (cosmos_proto.implements_interface) = "cosmos.authz.Authorization"; + + // max_tokens specifies the maximum amount of tokens can be delegate to a validator. If it is + // empty, there is no spend limit and any amount of coins can be delegated. + cosmos.base.v1beta1.Coin max_tokens = 1 [(gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coin"]; + // validators is the oneof that represents either allow_list or deny_list + oneof validators { + // allow_list specifies list of validator addresses to whom grantee can delegate tokens on behalf of granter's + // account. + Validators allow_list = 2; + // deny_list specifies list of validator addresses to whom grantee can not delegate tokens. + Validators deny_list = 3; + } + // Validators defines list of validator addresses. + message Validators { + repeated string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + } + // authorization_type defines one of AuthorizationType. + AuthorizationType authorization_type = 4; +} + +// AuthorizationType defines the type of staking module authorization type +// +// Since: cosmos-sdk 0.43 +enum AuthorizationType { + // AUTHORIZATION_TYPE_UNSPECIFIED specifies an unknown authorization type + AUTHORIZATION_TYPE_UNSPECIFIED = 0; + // AUTHORIZATION_TYPE_DELEGATE defines an authorization type for Msg/Delegate + AUTHORIZATION_TYPE_DELEGATE = 1; + // AUTHORIZATION_TYPE_UNDELEGATE defines an authorization type for Msg/Undelegate + AUTHORIZATION_TYPE_UNDELEGATE = 2; + // AUTHORIZATION_TYPE_REDELEGATE defines an authorization type for Msg/BeginRedelegate + AUTHORIZATION_TYPE_REDELEGATE = 3; +} diff --git a/packages/codegen/proto/cosmos/staking/v1beta1/genesis.proto b/packages/codegen/proto/cosmos/staking/v1beta1/genesis.proto new file mode 100644 index 00000000..bf3c298e --- /dev/null +++ b/packages/codegen/proto/cosmos/staking/v1beta1/genesis.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; +package cosmos.staking.v1beta1; + +option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; + +import "gogoproto/gogo.proto"; +import "cosmos/staking/v1beta1/staking.proto"; +import "cosmos_proto/cosmos.proto"; + +// GenesisState defines the staking module's genesis state. +message GenesisState { + // params defines all the paramaters of related to deposit. 
+ Params params = 1 [(gogoproto.nullable) = false]; + + // last_total_power tracks the total amounts of bonded tokens recorded during + // the previous end block. + bytes last_total_power = 2 + [(gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", (gogoproto.nullable) = false]; + + // last_validator_powers is a special index that provides a historical list + // of the last-block's bonded validators. + repeated LastValidatorPower last_validator_powers = 3 [(gogoproto.nullable) = false]; + + // delegations defines the validator set at genesis. + repeated Validator validators = 4 [(gogoproto.nullable) = false]; + + // delegations defines the delegations active at genesis. + repeated Delegation delegations = 5 [(gogoproto.nullable) = false]; + + // unbonding_delegations defines the unbonding delegations active at genesis. + repeated UnbondingDelegation unbonding_delegations = 6 [(gogoproto.nullable) = false]; + + // redelegations defines the redelegations active at genesis. + repeated Redelegation redelegations = 7 [(gogoproto.nullable) = false]; + + bool exported = 8; +} + +// LastValidatorPower required for validator set update logic. +message LastValidatorPower { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // address is the address of the validator. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // power defines the power of the validator. + int64 power = 2; +} diff --git a/packages/codegen/proto/cosmos/staking/v1beta1/query.proto b/packages/codegen/proto/cosmos/staking/v1beta1/query.proto new file mode 100644 index 00000000..02469232 --- /dev/null +++ b/packages/codegen/proto/cosmos/staking/v1beta1/query.proto @@ -0,0 +1,349 @@ +syntax = "proto3"; +package cosmos.staking.v1beta1; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "cosmos/staking/v1beta1/staking.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; + +// Query defines the gRPC querier service. +service Query { + // Validators queries all validators that match the given status. + rpc Validators(QueryValidatorsRequest) returns (QueryValidatorsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators"; + } + + // Validator queries validator info for given validator address. + rpc Validator(QueryValidatorRequest) returns (QueryValidatorResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators/{validator_addr}"; + } + + // ValidatorDelegations queries delegate info for given validator. + rpc ValidatorDelegations(QueryValidatorDelegationsRequest) returns (QueryValidatorDelegationsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators/{validator_addr}/delegations"; + } + + // ValidatorUnbondingDelegations queries unbonding delegations of a validator. + rpc ValidatorUnbondingDelegations(QueryValidatorUnbondingDelegationsRequest) + returns (QueryValidatorUnbondingDelegationsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators/" + "{validator_addr}/unbonding_delegations"; + } + + // Delegation queries delegate info for given validator delegator pair. 
+ rpc Delegation(QueryDelegationRequest) returns (QueryDelegationResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators/{validator_addr}/delegations/" + "{delegator_addr}"; + } + + // UnbondingDelegation queries unbonding info for given validator delegator + // pair. + rpc UnbondingDelegation(QueryUnbondingDelegationRequest) returns (QueryUnbondingDelegationResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/validators/{validator_addr}/delegations/" + "{delegator_addr}/unbonding_delegation"; + } + + // DelegatorDelegations queries all delegations of a given delegator address. + rpc DelegatorDelegations(QueryDelegatorDelegationsRequest) returns (QueryDelegatorDelegationsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/delegations/{delegator_addr}"; + } + + // DelegatorUnbondingDelegations queries all unbonding delegations of a given + // delegator address. + rpc DelegatorUnbondingDelegations(QueryDelegatorUnbondingDelegationsRequest) + returns (QueryDelegatorUnbondingDelegationsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/delegators/" + "{delegator_addr}/unbonding_delegations"; + } + + // Redelegations queries redelegations of given address. + rpc Redelegations(QueryRedelegationsRequest) returns (QueryRedelegationsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/delegators/{delegator_addr}/redelegations"; + } + + // DelegatorValidators queries all validators info for given delegator + // address. + rpc DelegatorValidators(QueryDelegatorValidatorsRequest) returns (QueryDelegatorValidatorsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/delegators/{delegator_addr}/validators"; + } + + // DelegatorValidator queries validator info for given delegator validator + // pair. + rpc DelegatorValidator(QueryDelegatorValidatorRequest) returns (QueryDelegatorValidatorResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/delegators/{delegator_addr}/validators/" + "{validator_addr}"; + } + + // HistoricalInfo queries the historical info for given height. + rpc HistoricalInfo(QueryHistoricalInfoRequest) returns (QueryHistoricalInfoResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/historical_info/{height}"; + } + + // Pool queries the pool info. + rpc Pool(QueryPoolRequest) returns (QueryPoolResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/pool"; + } + + // Parameters queries the staking parameters. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmos/staking/v1beta1/params"; + } +} + +// QueryValidatorsRequest is request type for Query/Validators RPC method. +message QueryValidatorsRequest { + // status enables to query for validators matching a given status. + string status = 1; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryValidatorsResponse is response type for the Query/Validators RPC method +message QueryValidatorsResponse { + // validators contains all the queried validators. + repeated Validator validators = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryValidatorRequest is response type for the Query/Validator RPC method +message QueryValidatorRequest { + // validator_addr defines the validator address to query for. 
+ string validator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryValidatorResponse is response type for the Query/Validator RPC method +message QueryValidatorResponse { + // validator defines the validator info. + Validator validator = 1 [(gogoproto.nullable) = false]; +} + +// QueryValidatorDelegationsRequest is request type for the +// Query/ValidatorDelegations RPC method +message QueryValidatorDelegationsRequest { + // validator_addr defines the validator address to query for. + string validator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryValidatorDelegationsResponse is response type for the +// Query/ValidatorDelegations RPC method +message QueryValidatorDelegationsResponse { + repeated DelegationResponse delegation_responses = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "DelegationResponses"]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryValidatorUnbondingDelegationsRequest is request type for the +// Query/ValidatorUnbondingDelegations RPC method +message QueryValidatorUnbondingDelegationsRequest { + // validator_addr defines the validator address to query for. + string validator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryValidatorUnbondingDelegationsResponse is response type for the +// Query/ValidatorUnbondingDelegations RPC method. +message QueryValidatorUnbondingDelegationsResponse { + repeated UnbondingDelegation unbonding_responses = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDelegationRequest is request type for the Query/Delegation RPC method. +message QueryDelegationRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // validator_addr defines the validator address to query for. + string validator_addr = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegationResponse is response type for the Query/Delegation RPC method. +message QueryDelegationResponse { + // delegation_response defines the delegation info of a delegation. + DelegationResponse delegation_response = 1; +} + +// QueryUnbondingDelegationRequest is request type for the +// Query/UnbondingDelegation RPC method. +message QueryUnbondingDelegationRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // validator_addr defines the validator address to query for. + string validator_addr = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryUnbondingDelegationResponse is response type for the Query/UnbondingDelegation +// RPC method. +message QueryUnbondingDelegationResponse { + // unbond defines the unbonding information of a delegation.
+ UnbondingDelegation unbond = 1 [(gogoproto.nullable) = false]; +} + +// QueryDelegatorDelegationsRequest is request type for the +// Query/DelegatorDelegations RPC method. +message QueryDelegatorDelegationsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryDelegatorDelegationsResponse is response type for the +// Query/DelegatorDelegations RPC method. +message QueryDelegatorDelegationsResponse { + // delegation_responses defines all the delegations' info of a delegator. + repeated DelegationResponse delegation_responses = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDelegatorUnbondingDelegationsRequest is request type for the +// Query/DelegatorUnbondingDelegations RPC method. +message QueryDelegatorUnbondingDelegationsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryUnbondingDelegatorDelegationsResponse is response type for the +// Query/UnbondingDelegatorDelegations RPC method. +message QueryDelegatorUnbondingDelegationsResponse { + repeated UnbondingDelegation unbonding_responses = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryRedelegationsRequest is request type for the Query/Redelegations RPC +// method. +message QueryRedelegationsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // src_validator_addr defines the validator address to redelegate from. + string src_validator_addr = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // dst_validator_addr defines the validator address to redelegate to. + string dst_validator_addr = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 4; +} + +// QueryRedelegationsResponse is response type for the Query/Redelegations RPC +// method. +message QueryRedelegationsResponse { + repeated RedelegationResponse redelegation_responses = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDelegatorValidatorsRequest is request type for the +// Query/DelegatorValidators RPC method. +message QueryDelegatorValidatorsRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // pagination defines an optional pagination for the request. 
+ cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryDelegatorValidatorsResponse is response type for the +// Query/DelegatorValidators RPC method. +message QueryDelegatorValidatorsResponse { + // validators defines the the validators' info of a delegator. + repeated Validator validators = 1 [(gogoproto.nullable) = false]; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryDelegatorValidatorRequest is request type for the +// Query/DelegatorValidator RPC method. +message QueryDelegatorValidatorRequest { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // delegator_addr defines the delegator address to query for. + string delegator_addr = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // validator_addr defines the validator address to query for. + string validator_addr = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// QueryDelegatorValidatorResponse response type for the +// Query/DelegatorValidator RPC method. +message QueryDelegatorValidatorResponse { + // validator defines the the validator info. + Validator validator = 1 [(gogoproto.nullable) = false]; +} + +// QueryHistoricalInfoRequest is request type for the Query/HistoricalInfo RPC +// method. +message QueryHistoricalInfoRequest { + // height defines at which height to query the historical info. + int64 height = 1; +} + +// QueryHistoricalInfoResponse is response type for the Query/HistoricalInfo RPC +// method. +message QueryHistoricalInfoResponse { + // hist defines the historical info at the given height. + HistoricalInfo hist = 1; +} + +// QueryPoolRequest is request type for the Query/Pool RPC method. +message QueryPoolRequest {} + +// QueryPoolResponse is response type for the Query/Pool RPC method. +message QueryPoolResponse { + // pool defines the pool info. + Pool pool = 1 [(gogoproto.nullable) = false]; +} + +// QueryParamsRequest is request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is response type for the Query/Params RPC method. +message QueryParamsResponse { + // params holds all the parameters of this module. + Params params = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/cosmos/staking/v1beta1/staking.proto b/packages/codegen/proto/cosmos/staking/v1beta1/staking.proto new file mode 100644 index 00000000..dcf2645f --- /dev/null +++ b/packages/codegen/proto/cosmos/staking/v1beta1/staking.proto @@ -0,0 +1,358 @@ +syntax = "proto3"; +package cosmos.staking.v1beta1; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "tendermint/types/types.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; + +// HistoricalInfo contains header and validator information for a given block. +// It is stored as part of staking module's state, which persists the `n` most +// recent HistoricalInfo +// (`n` is set by the staking module's `historical_entries` parameter). +message HistoricalInfo { + tendermint.types.Header header = 1 [(gogoproto.nullable) = false]; + repeated Validator valset = 2 [(gogoproto.nullable) = false]; +} + +// CommissionRates defines the initial commission rates to be used for creating +// a validator. 
+message CommissionRates { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // rate is the commission rate charged to delegators, as a fraction. + string rate = 1 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // max_rate defines the maximum commission rate which validator can ever charge, as a fraction. + string max_rate = 2 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // max_change_rate defines the maximum daily increase of the validator commission, as a fraction. + string max_change_rate = 3 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// Commission defines commission parameters for a given validator. +message Commission { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // commission_rates defines the initial commission rates to be used for creating a validator. + CommissionRates commission_rates = 1 [(gogoproto.embed) = true, (gogoproto.nullable) = false]; + // update_time is the last time the commission rate was changed. + google.protobuf.Timestamp update_time = 2 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// Description defines a validator description. +message Description { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // moniker defines a human-readable name for the validator. + string moniker = 1; + // identity defines an optional identity signature (ex. UPort or Keybase). + string identity = 2; + // website defines an optional website link. + string website = 3; + // security_contact defines an optional email for security contact. + string security_contact = 4; + // details define other optional details. + string details = 5; +} + +// Validator defines a validator, together with the total amount of the +// Validator's bond shares and their exchange rate to coins. Slashing results in +// a decrease in the exchange rate, allowing correct calculation of future +// undelegations without iterating over delegators. When coins are delegated to +// this validator, the validator is credited with a delegation whose number of +// bond shares is based on the amount of coins delegated divided by the current +// exchange rate. Voting power can be calculated as total bonded shares +// multiplied by exchange rate. +message Validator { + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.goproto_getters) = false; + + // operator_address defines the address of the validator's operator; bech encoded in JSON. + string operator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // consensus_pubkey is the consensus public key of the validator, as a Protobuf Any. + google.protobuf.Any consensus_pubkey = 2 [(cosmos_proto.accepts_interface) = "cosmos.crypto.PubKey"]; + // jailed defined whether the validator has been jailed from bonded status or not. + bool jailed = 3; + // status is the validator status (bonded/unbonding/unbonded). + BondStatus status = 4; + // tokens define the delegated tokens (incl. self-delegation). 
+ string tokens = 5 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + // delegator_shares defines total shares issued to a validator's delegators. + string delegator_shares = 6 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + // description defines the description terms for the validator. + Description description = 7 [(gogoproto.nullable) = false]; + // unbonding_height defines, if unbonding, the height at which this validator has begun unbonding. + int64 unbonding_height = 8; + // unbonding_time defines, if unbonding, the min time for the validator to complete unbonding. + google.protobuf.Timestamp unbonding_time = 9 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + // commission defines the commission parameters. + Commission commission = 10 [(gogoproto.nullable) = false]; + // min_self_delegation is the validator's self declared minimum self delegation. + string min_self_delegation = 11 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; +} + +// BondStatus is the status of a validator. +enum BondStatus { + option (gogoproto.goproto_enum_prefix) = false; + + // UNSPECIFIED defines an invalid validator status. + BOND_STATUS_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "Unspecified"]; + // UNBONDED defines a validator that is not bonded. + BOND_STATUS_UNBONDED = 1 [(gogoproto.enumvalue_customname) = "Unbonded"]; + // UNBONDING defines a validator that is unbonding. + BOND_STATUS_UNBONDING = 2 [(gogoproto.enumvalue_customname) = "Unbonding"]; + // BONDED defines a validator that is bonded. + BOND_STATUS_BONDED = 3 [(gogoproto.enumvalue_customname) = "Bonded"]; +} + +// ValAddresses defines a repeated set of validator addresses. +message ValAddresses { + option (gogoproto.goproto_stringer) = false; + option (gogoproto.stringer) = true; + + repeated string addresses = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// DVPair is struct that just has a delegator-validator pair with no other data. +// It is intended to be used as a marshalable pointer. For example, a DVPair can +// be used to construct the key to getting an UnbondingDelegation from state. +message DVPair { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// DVPairs defines an array of DVPair objects. +message DVPairs { + repeated DVPair pairs = 1 [(gogoproto.nullable) = false]; +} + +// DVVTriplet is struct that just has a delegator-validator-validator triplet +// with no other data. It is intended to be used as a marshalable pointer. For +// example, a DVVTriplet can be used to construct the key to getting a +// Redelegation from state. 
+message DVVTriplet { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_src_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_dst_address = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// DVVTriplets defines an array of DVVTriplet objects. +message DVVTriplets { + repeated DVVTriplet triplets = 1 [(gogoproto.nullable) = false]; +} + +// Delegation represents the bond with tokens held by an account. It is +// owned by one delegator, and is associated with the voting power of one +// validator. +message Delegation { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + // delegator_address is the bech32-encoded address of the delegator. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_address is the bech32-encoded address of the validator. + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // shares define the delegation shares received. + string shares = 3 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// UnbondingDelegation stores all of a single delegator's unbonding bonds +// for a single validator in an time-ordered list. +message UnbondingDelegation { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + // delegator_address is the bech32-encoded address of the delegator. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_address is the bech32-encoded address of the validator. + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // entries are the unbonding delegation entries. + repeated UnbondingDelegationEntry entries = 3 [(gogoproto.nullable) = false]; // unbonding delegation entries +} + +// UnbondingDelegationEntry defines an unbonding object with relevant metadata. +message UnbondingDelegationEntry { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // creation_height is the height which the unbonding took place. + int64 creation_height = 1; + // completion_time is the unix time for unbonding completion. + google.protobuf.Timestamp completion_time = 2 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + // initial_balance defines the tokens initially scheduled to receive at completion. + string initial_balance = 3 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + // balance defines the tokens to receive at completion. + string balance = 4 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; +} + +// RedelegationEntry defines a redelegation object with relevant metadata. +message RedelegationEntry { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // creation_height defines the height which the redelegation took place. + int64 creation_height = 1; + // completion_time defines the unix time for redelegation completion. 
+ google.protobuf.Timestamp completion_time = 2 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + // initial_balance defines the initial balance when redelegation started. + string initial_balance = 3 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + // shares_dst is the amount of destination-validator shares created by redelegation. + string shares_dst = 4 [ + (cosmos_proto.scalar) = "cosmos.Dec", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// Redelegation contains the list of a particular delegator's redelegating bonds +// from a particular source validator to a particular destination validator. +message Redelegation { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + // delegator_address is the bech32-encoded address of the delegator. + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_src_address is the validator redelegation source operator address. + string validator_src_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // validator_dst_address is the validator redelegation destination operator address. + string validator_dst_address = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // entries are the redelegation entries. + repeated RedelegationEntry entries = 4 [(gogoproto.nullable) = false]; // redelegation entries +} + +// Params defines the parameters for the staking module. +message Params { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // unbonding_time is the time duration of unbonding. + google.protobuf.Duration unbonding_time = 1 [(gogoproto.nullable) = false, (gogoproto.stdduration) = true]; + // max_validators is the maximum number of validators. + uint32 max_validators = 2; + // max_entries is the max entries for either unbonding delegation or redelegation (per pair/trio). + uint32 max_entries = 3; + // historical_entries is the number of historical entries to persist. + uint32 historical_entries = 4; + // bond_denom defines the bondable coin denomination. + string bond_denom = 5; + // min_commission_rate is the chain-wide minimum commission rate that a validator can charge their delegators + string min_commission_rate = 6 [ + (gogoproto.moretags) = "yaml:\"min_commission_rate\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; +} + +// DelegationResponse is equivalent to Delegation except that it contains a +// balance in addition to shares which is more suitable for client responses. +message DelegationResponse { + option (gogoproto.equal) = false; + option (gogoproto.goproto_stringer) = false; + + Delegation delegation = 1 [(gogoproto.nullable) = false]; + + cosmos.base.v1beta1.Coin balance = 2 [(gogoproto.nullable) = false]; +} + +// RedelegationEntryResponse is equivalent to a RedelegationEntry except that it +// contains a balance in addition to shares which is more suitable for client +// responses. 
+message RedelegationEntryResponse { + option (gogoproto.equal) = true; + + RedelegationEntry redelegation_entry = 1 [(gogoproto.nullable) = false]; + string balance = 4 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; +} + +// RedelegationResponse is equivalent to a Redelegation except that its entries +// contain a balance in addition to shares which is more suitable for client +// responses. +message RedelegationResponse { + option (gogoproto.equal) = false; + + Redelegation redelegation = 1 [(gogoproto.nullable) = false]; + repeated RedelegationEntryResponse entries = 2 [(gogoproto.nullable) = false]; +} + +// Pool is used for tracking bonded and not-bonded token supply of the bond +// denomination. +message Pool { + option (gogoproto.description) = true; + option (gogoproto.equal) = true; + string not_bonded_tokens = 1 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "not_bonded_tokens" + ]; + string bonded_tokens = 2 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "bonded_tokens" + ]; +} diff --git a/packages/codegen/proto/cosmos/staking/v1beta1/tx.proto b/packages/codegen/proto/cosmos/staking/v1beta1/tx.proto new file mode 100644 index 00000000..6c8d40a7 --- /dev/null +++ b/packages/codegen/proto/cosmos/staking/v1beta1/tx.proto @@ -0,0 +1,138 @@ +syntax = "proto3"; +package cosmos.staking.v1beta1; + +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "gogoproto/gogo.proto"; + +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/staking/v1beta1/staking.proto"; + +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; + +// Msg defines the staking Msg service. +service Msg { + // CreateValidator defines a method for creating a new validator. + rpc CreateValidator(MsgCreateValidator) returns (MsgCreateValidatorResponse); + + // EditValidator defines a method for editing an existing validator. + rpc EditValidator(MsgEditValidator) returns (MsgEditValidatorResponse); + + // Delegate defines a method for performing a delegation of coins + // from a delegator to a validator. + rpc Delegate(MsgDelegate) returns (MsgDelegateResponse); + + // BeginRedelegate defines a method for performing a redelegation + // of coins from a delegator and source validator to a destination validator. + rpc BeginRedelegate(MsgBeginRedelegate) returns (MsgBeginRedelegateResponse); + + // Undelegate defines a method for performing an undelegation from a + // delegate and a validator. + rpc Undelegate(MsgUndelegate) returns (MsgUndelegateResponse); +} + +// MsgCreateValidator defines a SDK message for creating a new validator. +message MsgCreateValidator { + // NOTE(fdymylja): this is a particular case in which + // if validator_address == delegator_address then only one + // is expected to sign, otherwise both are. 
+ option (cosmos.msg.v1.signer) = "delegator_address"; + option (cosmos.msg.v1.signer) = "validator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Description description = 1 [(gogoproto.nullable) = false]; + CommissionRates commission = 2 [(gogoproto.nullable) = false]; + string min_self_delegation = 3 [ + (cosmos_proto.scalar) = "cosmos.Int", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int", + (gogoproto.nullable) = false + ]; + string delegator_address = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_address = 5 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + google.protobuf.Any pubkey = 6 [(cosmos_proto.accepts_interface) = "cosmos.crypto.PubKey"]; + cosmos.base.v1beta1.Coin value = 7 [(gogoproto.nullable) = false]; +} + +// MsgCreateValidatorResponse defines the Msg/CreateValidator response type. +message MsgCreateValidatorResponse {} + +// MsgEditValidator defines a SDK message for editing an existing validator. +message MsgEditValidator { + option (cosmos.msg.v1.signer) = "validator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Description description = 1 [(gogoproto.nullable) = false]; + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // We pass a reference to the new commission rate and min self delegation as + // it's not mandatory to update. If not updated, the deserialized rate will be + // zero with no way to distinguish if an update was intended. + // REF: #2373 + string commission_rate = 3 + [(cosmos_proto.scalar) = "cosmos.Dec", (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec"]; + string min_self_delegation = 4 + [(cosmos_proto.scalar) = "cosmos.Int", (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Int"]; +} + +// MsgEditValidatorResponse defines the Msg/EditValidator response type. +message MsgEditValidatorResponse {} + +// MsgDelegate defines a SDK message for performing a delegation of coins +// from a delegator to a validator. +message MsgDelegate { + option (cosmos.msg.v1.signer) = "delegator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + cosmos.base.v1beta1.Coin amount = 3 [(gogoproto.nullable) = false]; +} + +// MsgDelegateResponse defines the Msg/Delegate response type. +message MsgDelegateResponse {} + +// MsgBeginRedelegate defines a SDK message for performing a redelegation +// of coins from a delegator and source validator to a destination validator. +message MsgBeginRedelegate { + option (cosmos.msg.v1.signer) = "delegator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_src_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_dst_address = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + cosmos.base.v1beta1.Coin amount = 4 [(gogoproto.nullable) = false]; +} + +// MsgBeginRedelegateResponse defines the Msg/BeginRedelegate response type. 
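The delegation messages above are plain data carriers; a sketch of the shapes they imply follows. The camelCased names and the `Coin` helper are assumptions for the example, and the addresses and denoms are placeholders.

```ts
// Sketch of the object shapes implied by MsgDelegate and MsgBeginRedelegate.

interface Coin {
  denom: string;
  amount: string; // cosmos.Int travels as a string of digits
}

interface MsgDelegate {
  delegatorAddress: string; // bech32 delegator address
  validatorAddress: string; // bech32 validator operator address
  amount: Coin;
}

interface MsgBeginRedelegate {
  delegatorAddress: string;
  validatorSrcAddress: string; // source validator operator address
  validatorDstAddress: string; // destination validator operator address
  amount: Coin;
}

// Placeholder example:
const delegate: MsgDelegate = {
  delegatorAddress: "cosmos1...",
  validatorAddress: "cosmosvaloper1...",
  amount: { denom: "uatom", amount: "1000000" },
};
```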
+message MsgBeginRedelegateResponse { + google.protobuf.Timestamp completion_time = 1 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// MsgUndelegate defines a SDK message for performing an undelegation from a +// delegate and a validator. +message MsgUndelegate { + option (cosmos.msg.v1.signer) = "delegator_address"; + + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string delegator_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string validator_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + cosmos.base.v1beta1.Coin amount = 3 [(gogoproto.nullable) = false]; +} + +// MsgUndelegateResponse defines the Msg/Undelegate response type. +message MsgUndelegateResponse { + google.protobuf.Timestamp completion_time = 1 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} diff --git a/packages/codegen/proto/cosmos/tx/signing/v1beta1/signing.proto b/packages/codegen/proto/cosmos/tx/signing/v1beta1/signing.proto new file mode 100644 index 00000000..5a22616f --- /dev/null +++ b/packages/codegen/proto/cosmos/tx/signing/v1beta1/signing.proto @@ -0,0 +1,94 @@ +syntax = "proto3"; +package cosmos.tx.signing.v1beta1; + +import "cosmos/crypto/multisig/v1beta1/multisig.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types/tx/signing"; + +// SignMode represents a signing mode with its own security guarantees. +// +// This enum should be considered a registry of all known sign modes +// in the Cosmos ecosystem. Apps are not expected to support all known +// sign modes. Apps that would like to support custom sign modes are +// encouraged to open a small PR against this file to add a new case +// to this SignMode enum describing their sign mode so that different +// apps have a consistent version of this enum. +enum SignMode { + // SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be + // rejected. + SIGN_MODE_UNSPECIFIED = 0; + + // SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is + // verified with raw bytes from Tx. + SIGN_MODE_DIRECT = 1; + + // SIGN_MODE_TEXTUAL is a future signing mode that will verify some + // human-readable textual representation on top of the binary representation + // from SIGN_MODE_DIRECT. It is currently not supported. + SIGN_MODE_TEXTUAL = 2; + + // SIGN_MODE_DIRECT_AUX specifies a signing mode which uses + // SignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not + // require signers signing over other signers' `signer_info`. It also allows + // for adding Tips in transactions. + // + // Since: cosmos-sdk 0.46 + SIGN_MODE_DIRECT_AUX = 3; + + // SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses + // Amino JSON and will be removed in the future. + SIGN_MODE_LEGACY_AMINO_JSON = 127; +} + +// SignatureDescriptors wraps multiple SignatureDescriptor's. +message SignatureDescriptors { + // signatures are the signature descriptors + repeated SignatureDescriptor signatures = 1; +} + +// SignatureDescriptor is a convenience type which represents the full data for +// a signature including the public key of the signer, signing modes and the +// signature itself. It is primarily used for coordinating signatures between +// clients. 
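The `SignMode` values above are the wire-level constants clients select when signing. A TypeScript mirror, using the numeric values straight from the enum, looks like this (the mirror itself is only illustrative):

```ts
// Numeric values copied from cosmos.tx.signing.v1beta1.SignMode above.
enum SignMode {
  SIGN_MODE_UNSPECIFIED = 0,         // unknown mode, rejected
  SIGN_MODE_DIRECT = 1,              // sign over SignDoc bytes
  SIGN_MODE_TEXTUAL = 2,             // future human-readable mode, not yet supported
  SIGN_MODE_DIRECT_AUX = 3,          // SignDocDirectAux, since cosmos-sdk 0.46
  SIGN_MODE_LEGACY_AMINO_JSON = 127, // backwards-compatibility Amino JSON
}

const mode: SignMode = SignMode.SIGN_MODE_DIRECT;
```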
+message SignatureDescriptor { + // public_key is the public key of the signer + google.protobuf.Any public_key = 1; + + Data data = 2; + + // sequence is the sequence of the account, which describes the + // number of committed transactions signed by a given address. It is used to prevent + // replay attacks. + uint64 sequence = 3; + + // Data represents signature data + message Data { + // sum is the oneof that specifies whether this represents single or multi-signature data + oneof sum { + // single represents a single signer + Single single = 1; + + // multi represents a multisig signer + Multi multi = 2; + } + + // Single is the signature data for a single signer + message Single { + // mode is the signing mode of the single signer + SignMode mode = 1; + + // signature is the raw signature bytes + bytes signature = 2; + } + + // Multi is the signature data for a multisig public key + message Multi { + // bitarray specifies which keys within the multisig are signing + cosmos.crypto.multisig.v1beta1.CompactBitArray bitarray = 1; + + // signatures is the signatures of the multi-signature + repeated Data signatures = 2; + } + } +} diff --git a/packages/codegen/proto/cosmos/tx/v1beta1/service.proto b/packages/codegen/proto/cosmos/tx/v1beta1/service.proto new file mode 100644 index 00000000..e7af1526 --- /dev/null +++ b/packages/codegen/proto/cosmos/tx/v1beta1/service.proto @@ -0,0 +1,163 @@ +syntax = "proto3"; +package cosmos.tx.v1beta1; + +import "google/api/annotations.proto"; +import "cosmos/base/abci/v1beta1/abci.proto"; +import "cosmos/tx/v1beta1/tx.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "tendermint/types/block.proto"; +import "tendermint/types/types.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; + +// Service defines a gRPC service for interacting with transactions. +service Service { + // Simulate simulates executing a transaction for estimating gas usage. + rpc Simulate(SimulateRequest) returns (SimulateResponse) { + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/simulate" + body: "*" + }; + } + // GetTx fetches a tx by hash. + rpc GetTx(GetTxRequest) returns (GetTxResponse) { + option (google.api.http).get = "/cosmos/tx/v1beta1/txs/{hash}"; + } + // BroadcastTx broadcast transaction. + rpc BroadcastTx(BroadcastTxRequest) returns (BroadcastTxResponse) { + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/txs" + body: "*" + }; + } + // GetTxsEvent fetches txs by event. + rpc GetTxsEvent(GetTxsEventRequest) returns (GetTxsEventResponse) { + option (google.api.http).get = "/cosmos/tx/v1beta1/txs"; + } + // GetBlockWithTxs fetches a block with decoded txs. + // + // Since: cosmos-sdk 0.45.2 + rpc GetBlockWithTxs(GetBlockWithTxsRequest) returns (GetBlockWithTxsResponse) { + option (google.api.http).get = "/cosmos/tx/v1beta1/txs/block/{height}"; + } +} + +// GetTxsEventRequest is the request type for the Service.TxsByEvents +// RPC method. +message GetTxsEventRequest { + // events is the list of transaction event type. + repeated string events = 1; + // pagination defines a pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; + OrderBy order_by = 3; +} + +// OrderBy defines the sorting order +enum OrderBy { + // ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case. 
+ ORDER_BY_UNSPECIFIED = 0; + // ORDER_BY_ASC defines ascending order + ORDER_BY_ASC = 1; + // ORDER_BY_DESC defines descending order + ORDER_BY_DESC = 2; +} + +// GetTxsEventResponse is the response type for the Service.TxsByEvents +// RPC method. +message GetTxsEventResponse { + // txs is the list of queried transactions. + repeated cosmos.tx.v1beta1.Tx txs = 1; + // tx_responses is the list of queried TxResponses. + repeated cosmos.base.abci.v1beta1.TxResponse tx_responses = 2; + // pagination defines a pagination for the response. + cosmos.base.query.v1beta1.PageResponse pagination = 3; +} + +// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +// RPC method. +message BroadcastTxRequest { + // tx_bytes is the raw transaction. + bytes tx_bytes = 1; + BroadcastMode mode = 2; +} + +// BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. +enum BroadcastMode { + // zero-value for mode ordering + BROADCAST_MODE_UNSPECIFIED = 0; + // BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + // the tx to be committed in a block. + BROADCAST_MODE_BLOCK = 1; + // BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + // a CheckTx execution response only. + BROADCAST_MODE_SYNC = 2; + // BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + // immediately. + BROADCAST_MODE_ASYNC = 3; +} + +// BroadcastTxResponse is the response type for the +// Service.BroadcastTx method. +message BroadcastTxResponse { + // tx_response is the queried TxResponses. + cosmos.base.abci.v1beta1.TxResponse tx_response = 1; +} + +// SimulateRequest is the request type for the Service.Simulate +// RPC method. +message SimulateRequest { + // tx is the transaction to simulate. + // Deprecated. Send raw tx bytes instead. + cosmos.tx.v1beta1.Tx tx = 1 [deprecated = true]; + // tx_bytes is the raw transaction. + // + // Since: cosmos-sdk 0.43 + bytes tx_bytes = 2; +} + +// SimulateResponse is the response type for the +// Service.SimulateRPC method. +message SimulateResponse { + // gas_info is the information about gas used in the simulation. + cosmos.base.abci.v1beta1.GasInfo gas_info = 1; + // result is the result of the simulation. + cosmos.base.abci.v1beta1.Result result = 2; +} + +// GetTxRequest is the request type for the Service.GetTx +// RPC method. +message GetTxRequest { + // hash is the tx hash to query, encoded as a hex string. + string hash = 1; +} + +// GetTxResponse is the response type for the Service.GetTx method. +message GetTxResponse { + // tx is the queried transaction. + cosmos.tx.v1beta1.Tx tx = 1; + // tx_response is the queried TxResponses. + cosmos.base.abci.v1beta1.TxResponse tx_response = 2; +} + +// GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs +// RPC method. +// +// Since: cosmos-sdk 0.45.2 +message GetBlockWithTxsRequest { + // height is the height of the block to query. + int64 height = 1; + // pagination defines a pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. +// +// Since: cosmos-sdk 0.45.2 +message GetBlockWithTxsResponse { + // txs are the transactions in the block. + repeated cosmos.tx.v1beta1.Tx txs = 1; + .tendermint.types.BlockID block_id = 2; + .tendermint.types.Block block = 3; + // pagination defines a pagination for the response. 
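The `google.api.http` annotations on the tx `Service` above double as REST routes once the chain exposes a gRPC-gateway/LCD endpoint. A minimal sketch of calling two of them follows; it assumes a global `fetch` (Node 18+ or a browser), and the endpoint URL is a placeholder.

```ts
// Calling the gateway routes declared on cosmos.tx.v1beta1.Service.
const LCD = "https://lcd.example.com"; // placeholder LCD endpoint

// GetTx: GET /cosmos/tx/v1beta1/txs/{hash}
async function getTx(hashHex: string): Promise<unknown> {
  const res = await fetch(`${LCD}/cosmos/tx/v1beta1/txs/${hashHex}`);
  if (!res.ok) throw new Error(`GetTx failed: ${res.status}`);
  return res.json(); // JSON form of GetTxResponse { tx, tx_response }
}

// BroadcastTx: POST /cosmos/tx/v1beta1/txs
async function broadcastTx(txBytesBase64: string): Promise<unknown> {
  const res = await fetch(`${LCD}/cosmos/tx/v1beta1/txs`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      tx_bytes: txBytesBase64,     // raw transaction bytes, base64-encoded
      mode: "BROADCAST_MODE_SYNC", // wait for CheckTx only
    }),
  });
  if (!res.ok) throw new Error(`BroadcastTx failed: ${res.status}`);
  return res.json(); // JSON form of BroadcastTxResponse { tx_response }
}
```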
+ cosmos.base.query.v1beta1.PageResponse pagination = 4; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/tx/v1beta1/tx.proto b/packages/codegen/proto/cosmos/tx/v1beta1/tx.proto new file mode 100644 index 00000000..ac7b690f --- /dev/null +++ b/packages/codegen/proto/cosmos/tx/v1beta1/tx.proto @@ -0,0 +1,249 @@ +syntax = "proto3"; +package cosmos.tx.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/crypto/multisig/v1beta1/multisig.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/tx/signing/v1beta1/signing.proto"; +import "google/protobuf/any.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; + +// Tx is the standard type used for broadcasting transactions. +message Tx { + // body is the processable content of the transaction + TxBody body = 1; + + // auth_info is the authorization related content of the transaction, + // specifically signers, signer modes and fee + AuthInfo auth_info = 2; + + // signatures is a list of signatures that matches the length and order of + // AuthInfo's signer_infos to allow connecting signature meta information like + // public key and signing mode by position. + repeated bytes signatures = 3; +} + +// TxRaw is a variant of Tx that pins the signer's exact binary representation +// of body and auth_info. This is used for signing, broadcasting and +// verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and +// the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used +// as the transaction ID. +message TxRaw { + // body_bytes is a protobuf serialization of a TxBody that matches the + // representation in SignDoc. + bytes body_bytes = 1; + + // auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + // representation in SignDoc. + bytes auth_info_bytes = 2; + + // signatures is a list of signatures that matches the length and order of + // AuthInfo's signer_infos to allow connecting signature meta information like + // public key and signing mode by position. + repeated bytes signatures = 3; +} + +// SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. +message SignDoc { + // body_bytes is protobuf serialization of a TxBody that matches the + // representation in TxRaw. + bytes body_bytes = 1; + + // auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + // representation in TxRaw. + bytes auth_info_bytes = 2; + + // chain_id is the unique identifier of the chain this transaction targets. + // It prevents signed transactions from being used on another chain by an + // attacker + string chain_id = 3; + + // account_number is the account number of the account in state + uint64 account_number = 4; +} + +// SignDocDirectAux is the type used for generating sign bytes for +// SIGN_MODE_DIRECT_AUX. +// +// Since: cosmos-sdk 0.46 +message SignDocDirectAux { + // body_bytes is protobuf serialization of a TxBody that matches the + // representation in TxRaw. + bytes body_bytes = 1; + + // public_key is the public key of the signing account. + google.protobuf.Any public_key = 2; + + // chain_id is the identifier of the chain this transaction targets. + // It prevents signed transactions from being used on another chain by an + // attacker. + string chain_id = 3; + + // account_number is the account number of the account in state. + uint64 account_number = 4; + + // sequence is the sequence number of the signing account. 
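The `TxRaw` comment above pins down how the familiar transaction hash is derived: it is the SHA-256 of the serialized `TxRaw`, conventionally displayed as upper-case hex. A short sketch using Node's built-in crypto follows; the encoder that produces `txRawBytes` is assumed (for example a generated `TxRaw.encode(...).finish()`).

```ts
import { createHash } from "node:crypto";

// txhash = sha256(serialize(TxRaw)), shown as upper-case hex by convention.
function txHash(txRawBytes: Uint8Array): string {
  return createHash("sha256").update(txRawBytes).digest("hex").toUpperCase();
}
```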
+ uint64 sequence = 5; + + // Tip is the optional tip used for meta-transactions. It should be left + // empty if the signer is not the tipper for this transaction. + Tip tip = 6; +} + +// TxBody is the body of a transaction that all signers sign over. +message TxBody { + // messages is a list of messages to be executed. The required signers of + // those messages define the number and order of elements in AuthInfo's + // signer_infos and Tx's signatures. Each required signer address is added to + // the list only the first time it occurs. + // By convention, the first required signer (usually from the first message) + // is referred to as the primary signer and pays the fee for the whole + // transaction. + repeated google.protobuf.Any messages = 1; + + // memo is any arbitrary note/comment to be added to the transaction. + // WARNING: in clients, any publicly exposed text should not be called memo, + // but should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122). + string memo = 2; + + // timeout is the block height after which this transaction will not + // be processed by the chain + uint64 timeout_height = 3; + + // extension_options are arbitrary options that can be added by chains + // when the default options are not sufficient. If any of these are present + // and can't be handled, the transaction will be rejected + repeated google.protobuf.Any extension_options = 1023; + + // extension_options are arbitrary options that can be added by chains + // when the default options are not sufficient. If any of these are present + // and can't be handled, they will be ignored + repeated google.protobuf.Any non_critical_extension_options = 2047; +} + +// AuthInfo describes the fee and signer modes that are used to sign a +// transaction. +message AuthInfo { + // signer_infos defines the signing modes for the required signers. The number + // and order of elements must match the required signers from TxBody's + // messages. The first element is the primary signer and the one which pays + // the fee. + repeated SignerInfo signer_infos = 1; + + // Fee is the fee and gas limit for the transaction. The first signer is the + // primary signer and the one which pays the fee. The fee can be calculated + // based on the cost of evaluating the body and doing signature verification + // of the signers. This can be estimated via simulation. + Fee fee = 2; + + // Tip is the optional tip used for meta-transactions. + // + // Since: cosmos-sdk 0.46 + Tip tip = 3; +} + +// SignerInfo describes the public key and signing mode of a single top-level +// signer. +message SignerInfo { + // public_key is the public key of the signer. It is optional for accounts + // that already exist in state. If unset, the verifier can use the required \ + // signer address for this position and lookup the public key. + google.protobuf.Any public_key = 1; + + // mode_info describes the signing mode of the signer and is a nested + // structure to support nested multisig pubkey's + ModeInfo mode_info = 2; + + // sequence is the sequence of the account, which describes the + // number of committed transactions signed by a given address. It is used to + // prevent replay attacks. + uint64 sequence = 3; +} + +// ModeInfo describes the signing mode of a single or nested multisig signer. 
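For SIGN_MODE_DIRECT, the signer signs over the `SignDoc` defined above, which reuses the exact `body_bytes` and `auth_info_bytes` that later go into `TxRaw`. A type-level sketch (names are assumed; producing the protobuf bytes is left to the generated encoders):

```ts
// Shape of the SIGN_MODE_DIRECT signing payload, mirroring SignDoc above.
interface SignDoc {
  bodyBytes: Uint8Array;     // protobuf-encoded TxBody, same bytes as in TxRaw
  authInfoBytes: Uint8Array; // protobuf-encoded AuthInfo, same bytes as in TxRaw
  chainId: string;           // prevents replay on other chains
  accountNumber: bigint;     // signer's account number in state
}

function makeSignDoc(
  bodyBytes: Uint8Array,
  authInfoBytes: Uint8Array,
  chainId: string,
  accountNumber: bigint,
): SignDoc {
  return { bodyBytes, authInfoBytes, chainId, accountNumber };
}
```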
+message ModeInfo { + // sum is the oneof that specifies whether this represents a single or nested + // multisig signer + oneof sum { + // single represents a single signer + Single single = 1; + + // multi represents a nested multisig signer + Multi multi = 2; + } + + // Single is the mode info for a single signer. It is structured as a message + // to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + // future + message Single { + // mode is the signing mode of the single signer + cosmos.tx.signing.v1beta1.SignMode mode = 1; + } + + // Multi is the mode info for a multisig public key + message Multi { + // bitarray specifies which keys within the multisig are signing + cosmos.crypto.multisig.v1beta1.CompactBitArray bitarray = 1; + + // mode_infos is the corresponding modes of the signers of the multisig + // which could include nested multisig public keys + repeated ModeInfo mode_infos = 2; + } +} + +// Fee includes the amount of coins paid in fees and the maximum +// gas to be used by the transaction. The ratio yields an effective "gasprice", +// which must be above some miminum to be accepted into the mempool. +message Fee { + // amount is the amount of coins to be paid as a fee + repeated cosmos.base.v1beta1.Coin amount = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + // gas_limit is the maximum gas that can be used in transaction processing + // before an out of gas error occurs + uint64 gas_limit = 2; + + // if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees. + // the payer must be a tx signer (and thus have signed this field in AuthInfo). + // setting this field does *not* change the ordering of required signers for the transaction. + string payer = 3 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used + // to pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does + // not support fee grants, this will fail + string granter = 4 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// Tip is the tip used for meta-transactions. +// +// Since: cosmos-sdk 0.46 +message Tip { + // amount is the amount of the tip + repeated cosmos.base.v1beta1.Coin amount = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + // tipper is the address of the account paying for the tip + string tipper = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// AuxSignerData is the intermediary format that an auxiliary signer (e.g. a +// tipper) builds and sends to the fee payer (who will build and broadcast the +// actual tx). AuxSignerData is not a valid tx in itself, and will be rejected +// by the node if sent directly as-is. +// +// Since: cosmos-sdk 0.46 +message AuxSignerData { + // address is the bech32-encoded address of the auxiliary signer. If using + // AuxSignerData across different chains, the bech32 prefix of the target + // chain (where the final transaction is broadcasted) should be used. + string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + // sign_doc is the SIGN_MOD_DIRECT_AUX sign doc that the auxiliary signer + // signs. Note: we use the same sign doc even if we're signing with + // LEGACY_AMINO_JSON. 
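`Fee` above is what caps gas and pays for it: leaving `payer` empty means the first signer pays, and `granter` is only set when a fee grant should cover the fee. A placeholder sketch:

```ts
// Sketch of a Fee value per cosmos.tx.v1beta1.Fee above; values are placeholders.
interface Coin {
  denom: string;
  amount: string;
}

interface Fee {
  amount: Coin[];   // total fee to pay
  gasLimit: bigint; // maximum gas before an out-of-gas error
  payer: string;    // optional explicit payer, must be a tx signer; "" = first signer
  granter: string;  // optional fee granter address; "" = no fee grant
}

const fee: Fee = {
  amount: [{ denom: "uatom", amount: "5000" }],
  gasLimit: 200000n,
  payer: "",
  granter: "",
};
```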
+ SignDocDirectAux sign_doc = 2; + // mode is the signing mode of the single signer + cosmos.tx.signing.v1beta1.SignMode mode = 3; + // sig is the signature of the sign doc. + bytes sig = 4; +} diff --git a/packages/codegen/proto/cosmos/upgrade/v1beta1/query.proto b/packages/codegen/proto/cosmos/upgrade/v1beta1/query.proto new file mode 100644 index 00000000..e8c4baa0 --- /dev/null +++ b/packages/codegen/proto/cosmos/upgrade/v1beta1/query.proto @@ -0,0 +1,120 @@ +syntax = "proto3"; +package cosmos.upgrade.v1beta1; + +import "google/api/annotations.proto"; +import "cosmos/upgrade/v1beta1/upgrade.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/upgrade/types"; + +// Query defines the gRPC upgrade querier service. +service Query { + // CurrentPlan queries the current upgrade plan. + rpc CurrentPlan(QueryCurrentPlanRequest) returns (QueryCurrentPlanResponse) { + option (google.api.http).get = "/cosmos/upgrade/v1beta1/current_plan"; + } + + // AppliedPlan queries a previously applied upgrade plan by its name. + rpc AppliedPlan(QueryAppliedPlanRequest) returns (QueryAppliedPlanResponse) { + option (google.api.http).get = "/cosmos/upgrade/v1beta1/applied_plan/{name}"; + } + + // UpgradedConsensusState queries the consensus state that will serve + // as a trusted kernel for the next version of this chain. It will only be + // stored at the last height of this chain. + // UpgradedConsensusState RPC not supported with legacy querier + // This rpc is deprecated now that IBC has its own replacement + // (https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) + rpc UpgradedConsensusState(QueryUpgradedConsensusStateRequest) returns (QueryUpgradedConsensusStateResponse) { + option deprecated = true; + option (google.api.http).get = "/cosmos/upgrade/v1beta1/upgraded_consensus_state/{last_height}"; + } + + // ModuleVersions queries the list of module versions from state. + // + // Since: cosmos-sdk 0.43 + rpc ModuleVersions(QueryModuleVersionsRequest) returns (QueryModuleVersionsResponse) { + option (google.api.http).get = "/cosmos/upgrade/v1beta1/module_versions"; + } + + // Returns the account with authority to conduct upgrades + rpc Authority(QueryAuthorityRequest) returns (QueryAuthorityResponse) { + option (google.api.http).get = "/cosmos/upgrade/v1beta1/authority"; + } +} + +// QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC +// method. +message QueryCurrentPlanRequest {} + +// QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC +// method. +message QueryCurrentPlanResponse { + // plan is the current upgrade plan. + Plan plan = 1; +} + +// QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC +// method. +message QueryAppliedPlanRequest { + // name is the name of the applied plan to query for. + string name = 1; +} + +// QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC +// method. +message QueryAppliedPlanResponse { + // height is the block height at which the plan was applied. + int64 height = 1; +} + +// QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState +// RPC method. 
+message QueryUpgradedConsensusStateRequest { + option deprecated = true; + + // last height of the current chain must be sent in request + // as this is the height under which next consensus state is stored + int64 last_height = 1; +} + +// QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState +// RPC method. +message QueryUpgradedConsensusStateResponse { + option deprecated = true; + reserved 1; + + // Since: cosmos-sdk 0.43 + bytes upgraded_consensus_state = 2; +} + +// QueryModuleVersionsRequest is the request type for the Query/ModuleVersions +// RPC method. +// +// Since: cosmos-sdk 0.43 +message QueryModuleVersionsRequest { + // module_name is a field to query a specific module + // consensus version from state. Leaving this empty will + // fetch the full list of module versions from state + string module_name = 1; +} + +// QueryModuleVersionsResponse is the response type for the Query/ModuleVersions +// RPC method. +// +// Since: cosmos-sdk 0.43 +message QueryModuleVersionsResponse { + // module_versions is a list of module names with their consensus versions. + repeated ModuleVersion module_versions = 1; +} + +// QueryAuthorityRequest is the request type for Query/Authority +// +// Since: cosmos-sdk 0.46 +message QueryAuthorityRequest {} + +// QueryAuthorityResponse is the response type for Query/Authority +// +// Since: cosmos-sdk 0.46 +message QueryAuthorityResponse { + string address = 1; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/upgrade/v1beta1/tx.proto b/packages/codegen/proto/cosmos/upgrade/v1beta1/tx.proto new file mode 100644 index 00000000..9b04bf44 --- /dev/null +++ b/packages/codegen/proto/cosmos/upgrade/v1beta1/tx.proto @@ -0,0 +1,55 @@ +syntax = "proto3"; +package cosmos.upgrade.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/upgrade/v1beta1/upgrade.proto"; +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/upgrade/types"; + +// Msg defines the upgrade Msg service. +service Msg { + // SoftwareUpgrade is a governance operation for initiating a software upgrade. + // + // Since: cosmos-sdk 0.46 + rpc SoftwareUpgrade(MsgSoftwareUpgrade) returns (MsgSoftwareUpgradeResponse); + // CancelUpgrade is a governance operation for cancelling a previously + // approvid software upgrade. + // + // Since: cosmos-sdk 0.46 + rpc CancelUpgrade(MsgCancelUpgrade) returns (MsgCancelUpgradeResponse); +} + +// MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. +// +// Since: cosmos-sdk 0.46 +message MsgSoftwareUpgrade { + option (cosmos.msg.v1.signer) = "authority"; + + // authority is the address of the governance account. + string authority = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + + // plan is the upgrade plan. + Plan plan = 2 [(gogoproto.nullable) = false]; +} + +// MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. +// +// Since: cosmos-sdk 0.46 +message MsgSoftwareUpgradeResponse {} + +// MsgCancelUpgrade is the Msg/CancelUpgrade request type. +// +// Since: cosmos-sdk 0.46 +message MsgCancelUpgrade { + option (cosmos.msg.v1.signer) = "authority"; + + // authority is the address of the governance account. + string authority = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; +} + +// MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. 
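The upgrade `Query` service above also exposes gateway routes. A minimal sketch of two of them, again assuming a global `fetch` and a placeholder LCD endpoint:

```ts
const LCD = "https://lcd.example.com"; // placeholder LCD endpoint

// Query/CurrentPlan: GET /cosmos/upgrade/v1beta1/current_plan
async function currentPlan(): Promise<unknown> {
  const res = await fetch(`${LCD}/cosmos/upgrade/v1beta1/current_plan`);
  if (!res.ok) throw new Error(`CurrentPlan failed: ${res.status}`);
  return res.json(); // JSON form of QueryCurrentPlanResponse { plan }
}

// Query/AppliedPlan: GET /cosmos/upgrade/v1beta1/applied_plan/{name}
async function appliedPlan(name: string): Promise<unknown> {
  const res = await fetch(
    `${LCD}/cosmos/upgrade/v1beta1/applied_plan/${encodeURIComponent(name)}`,
  );
  if (!res.ok) throw new Error(`AppliedPlan failed: ${res.status}`);
  return res.json(); // JSON form of QueryAppliedPlanResponse { height }
}
```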
+// +// Since: cosmos-sdk 0.46 +message MsgCancelUpgradeResponse {} \ No newline at end of file diff --git a/packages/codegen/proto/cosmos/upgrade/v1beta1/upgrade.proto b/packages/codegen/proto/cosmos/upgrade/v1beta1/upgrade.proto new file mode 100644 index 00000000..dc15e27c --- /dev/null +++ b/packages/codegen/proto/cosmos/upgrade/v1beta1/upgrade.proto @@ -0,0 +1,86 @@ +syntax = "proto3"; +package cosmos.upgrade.v1beta1; + +import "google/protobuf/any.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "cosmos_proto/cosmos.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/upgrade/types"; +option (gogoproto.goproto_getters_all) = false; + +// Plan specifies information about a planned upgrade and when it should occur. +message Plan { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // Sets the name for the upgrade. This name will be used by the upgraded + // version of the software to apply any special "on-upgrade" commands during + // the first BeginBlock method after the upgrade is applied. It is also used + // to detect whether a software version can handle a given upgrade. If no + // upgrade handler with this name has been set in the software, it will be + // assumed that the software is out-of-date when the upgrade Time or Height is + // reached and the software will exit. + string name = 1; + + // Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + // has been removed from the SDK. + // If this field is not empty, an error will be thrown. + google.protobuf.Timestamp time = 2 [deprecated = true, (gogoproto.stdtime) = true, (gogoproto.nullable) = false]; + + // The height at which the upgrade must be performed. + // Only used if Time is not set. + int64 height = 3; + + // Any application specific upgrade info to be included on-chain + // such as a git commit that validators could automatically upgrade to + string info = 4; + + // Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + // moved to the IBC module in the sub module 02-client. + // If this field is not empty, an error will be thrown. + google.protobuf.Any upgraded_client_state = 5 [deprecated = true]; +} + +// SoftwareUpgradeProposal is a gov Content type for initiating a software +// upgrade. +// Deprecated: This legacy proposal is deprecated in favor of Msg-based gov +// proposals, see MsgSoftwareUpgrade. +message SoftwareUpgradeProposal { + option deprecated = true; + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; + Plan plan = 3 [(gogoproto.nullable) = false]; +} + +// CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software +// upgrade. +// Deprecated: This legacy proposal is deprecated in favor of Msg-based gov +// proposals, see MsgCancelUpgrade. +message CancelSoftwareUpgradeProposal { + option deprecated = true; + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; +} + +// ModuleVersion specifies a module and its consensus version. 
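Per the `Plan` message above, only `name`, `height` and `info` carry meaning today; `time` and `upgraded_client_state` are deprecated and must stay unset. A placeholder sketch of a plan value follows; the JSON layout shown for `info` is just one common convention, not something mandated by the proto.

```ts
// Sketch of an upgrade Plan; values are placeholders.
interface Plan {
  name: string;   // must match an upgrade handler registered in the new binary
  height: bigint; // block height at which the upgrade is performed
  info: string;   // free-form upgrade info stored on-chain
}

const plan: Plan = {
  name: "v2",
  height: 1234567n,
  info: '{"binaries":{"linux/amd64":"https://example.com/node-v2.tar.gz"}}',
};
```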
+// +// Since: cosmos-sdk 0.43 +message ModuleVersion { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = true; + + // name of the app module + string name = 1; + + // consensus version of the app module + uint64 version = 2; +} diff --git a/packages/codegen/proto/cosmos/vesting/v1beta1/tx.proto b/packages/codegen/proto/cosmos/vesting/v1beta1/tx.proto new file mode 100644 index 00000000..211bad09 --- /dev/null +++ b/packages/codegen/proto/cosmos/vesting/v1beta1/tx.proto @@ -0,0 +1,74 @@ +syntax = "proto3"; +package cosmos.vesting.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/vesting/v1beta1/vesting.proto"; + +import "cosmos/msg/v1/msg.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/auth/vesting/types"; + +// Msg defines the bank Msg service. +service Msg { + // CreateVestingAccount defines a method that enables creating a vesting + // account. + rpc CreateVestingAccount(MsgCreateVestingAccount) returns (MsgCreateVestingAccountResponse); + // CreatePermanentLockedAccount defines a method that enables creating a permanent + // locked account. + rpc CreatePermanentLockedAccount(MsgCreatePermanentLockedAccount) returns (MsgCreatePermanentLockedAccountResponse); + // CreatePeriodicVestingAccount defines a method that enables creating a + // periodic vesting account. + rpc CreatePeriodicVestingAccount(MsgCreatePeriodicVestingAccount) returns (MsgCreatePeriodicVestingAccountResponse); +} + +// MsgCreateVestingAccount defines a message that enables creating a vesting +// account. +message MsgCreateVestingAccount { + option (cosmos.msg.v1.signer) = "from_address"; + + option (gogoproto.equal) = true; + + string from_address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + string to_address = 2 [(cosmos_proto.scalar) = "cosmos.AddressString"]; + repeated cosmos.base.v1beta1.Coin amount = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + + int64 end_time = 4; + bool delayed = 5; +} + +// MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. +message MsgCreateVestingAccountResponse {} + +// MsgCreatePermanentLockedAccount defines a message that enables creating a permanent +// locked account. +message MsgCreatePermanentLockedAccount { + option (gogoproto.equal) = true; + + string from_address = 1 [(gogoproto.moretags) = "yaml:\"from_address\""]; + string to_address = 2 [(gogoproto.moretags) = "yaml:\"to_address\""]; + repeated cosmos.base.v1beta1.Coin amount = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. +message MsgCreatePermanentLockedAccountResponse {} + +// MsgCreateVestingAccount defines a message that enables creating a vesting +// account. +message MsgCreatePeriodicVestingAccount { + option (cosmos.msg.v1.signer) = "from_address"; + + option (gogoproto.equal) = false; + + string from_address = 1; + string to_address = 2; + int64 start_time = 3; + repeated Period vesting_periods = 4 [(gogoproto.nullable) = false]; +} + +// MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount +// response type. 
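`MsgCreateVestingAccount` above funds a brand-new vesting account in a single message, with `delayed` switching between the delayed and continuous flavours described in `vesting.proto`. A sketch with placeholder values; the camelCase names and the seconds-based reading of `end_time` are assumptions of the example.

```ts
interface Coin {
  denom: string;
  amount: string;
}

// Sketch of the MsgCreateVestingAccount shape defined above.
interface MsgCreateVestingAccount {
  fromAddress: string;
  toAddress: string;
  amount: Coin[];
  endTime: bigint;  // unix time at which vesting completes (assumed seconds)
  delayed: boolean; // true = everything unlocks at endTime (delayed vesting)
}

const msg: MsgCreateVestingAccount = {
  fromAddress: "cosmos1...",
  toAddress: "cosmos1...",
  amount: [{ denom: "uatom", amount: "1000000" }],
  endTime: 1893456000n, // placeholder: 2030-01-01T00:00:00Z
  delayed: false,
};
```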
+message MsgCreatePeriodicVestingAccountResponse {} diff --git a/packages/codegen/proto/cosmos/vesting/v1beta1/vesting.proto b/packages/codegen/proto/cosmos/vesting/v1beta1/vesting.proto new file mode 100644 index 00000000..824cc30d --- /dev/null +++ b/packages/codegen/proto/cosmos/vesting/v1beta1/vesting.proto @@ -0,0 +1,76 @@ +syntax = "proto3"; +package cosmos.vesting.v1beta1; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmos/auth/v1beta1/auth.proto"; + +option go_package = "github.com/cosmos/cosmos-sdk/x/auth/vesting/types"; + +// BaseVestingAccount implements the VestingAccount interface. It contains all +// the necessary fields needed for any vesting account implementation. +message BaseVestingAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + cosmos.auth.v1beta1.BaseAccount base_account = 1 [(gogoproto.embed) = true]; + repeated cosmos.base.v1beta1.Coin original_vesting = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + repeated cosmos.base.v1beta1.Coin delegated_free = 3 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + repeated cosmos.base.v1beta1.Coin delegated_vesting = 4 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; + int64 end_time = 5; +} + +// ContinuousVestingAccount implements the VestingAccount interface. It +// continuously vests by unlocking coins linearly with respect to time. +message ContinuousVestingAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + BaseVestingAccount base_vesting_account = 1 [(gogoproto.embed) = true]; + int64 start_time = 2; +} + +// DelayedVestingAccount implements the VestingAccount interface. It vests all +// coins after a specific time, but non prior. In other words, it keeps them +// locked until a specified time. +message DelayedVestingAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + BaseVestingAccount base_vesting_account = 1 [(gogoproto.embed) = true]; +} + +// Period defines a length of time and amount of coins that will vest. +message Period { + option (gogoproto.goproto_stringer) = false; + + int64 length = 1; + repeated cosmos.base.v1beta1.Coin amount = 2 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins"]; +} + +// PeriodicVestingAccount implements the VestingAccount interface. It +// periodically vests by unlocking coins during each specified period. +message PeriodicVestingAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + BaseVestingAccount base_vesting_account = 1 [(gogoproto.embed) = true]; + int64 start_time = 2; + repeated Period vesting_periods = 3 [(gogoproto.nullable) = false]; +} + +// PermanentLockedAccount implements the VestingAccount interface. It does +// not ever release coins, locking them indefinitely. Coins in this account can +// still be used for delegating and for governance votes even while locked. 
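`ContinuousVestingAccount` above is described as unlocking coins linearly between `start_time` and the base account's `end_time`. The sketch below only illustrates that linear rule for a single denom; it is not the SDK's implementation, and the integer-truncation behaviour is an assumption of the example.

```ts
// Linear vesting share for one denom, per the comment on ContinuousVestingAccount.
function vestedAmount(
  originalVesting: bigint, // original_vesting amount for this denom
  startTime: number,       // ContinuousVestingAccount.start_time (unix seconds)
  endTime: number,         // BaseVestingAccount.end_time (unix seconds)
  now: number,             // current block time (unix seconds)
): bigint {
  if (now <= startTime) return 0n;
  if (now >= endTime) return originalVesting;
  const elapsed = BigInt(now - startTime);
  const total = BigInt(endTime - startTime);
  return (originalVesting * elapsed) / total; // integer-truncated linear share
}

// Half-way through the schedule, half of the coins are vested:
const vested = vestedAmount(1_000_000n, 1_700_000_000, 1_800_000_000, 1_750_000_000); // 500000n
```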
+// +// Since: cosmos-sdk 0.43 +message PermanentLockedAccount { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + BaseVestingAccount base_vesting_account = 1 [(gogoproto.embed) = true]; +} diff --git a/packages/codegen/proto/cosmos_proto/LICENSE b/packages/codegen/proto/cosmos_proto/LICENSE new file mode 100644 index 00000000..6b3e3508 --- /dev/null +++ b/packages/codegen/proto/cosmos_proto/LICENSE @@ -0,0 +1,204 @@ +Pulsar +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Regen Network Development, Inc. & All in Bits, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/packages/codegen/proto/cosmos_proto/README.md b/packages/codegen/proto/cosmos_proto/README.md new file mode 100644 index 00000000..9599cc65 --- /dev/null +++ b/packages/codegen/proto/cosmos_proto/README.md @@ -0,0 +1 @@ +# cosmos_proto \ No newline at end of file diff --git a/packages/codegen/proto/cosmos_proto/cosmos.proto b/packages/codegen/proto/cosmos_proto/cosmos.proto new file mode 100644 index 00000000..5c63b86f --- /dev/null +++ b/packages/codegen/proto/cosmos_proto/cosmos.proto @@ -0,0 +1,97 @@ +syntax = "proto3"; +package cosmos_proto; + +import "google/protobuf/descriptor.proto"; + +option go_package = "github.com/cosmos/cosmos-proto;cosmos_proto"; + +extend google.protobuf.MessageOptions { + + // implements_interface is used to indicate the type name of the interface + // that a message implements so that it can be used in google.protobuf.Any + // fields that accept that interface. A message can implement multiple + // interfaces. Interfaces should be declared using a declare_interface + // file option. + repeated string implements_interface = 93001; +} + +extend google.protobuf.FieldOptions { + + // accepts_interface is used to annotate that a google.protobuf.Any + // field accepts messages that implement the specified interface. + // Interfaces should be declared using a declare_interface file option. + string accepts_interface = 93001; + + // scalar is used to indicate that this field follows the formatting defined + // by the named scalar which should be declared with declare_scalar. Code + // generators may choose to use this information to map this field to a + // language-specific type representing the scalar. + string scalar = 93002; +} + +extend google.protobuf.FileOptions { + + // declare_interface declares an interface type to be used with + // accepts_interface and implements_interface. Interface names are + // expected to follow the following convention such that their declaration + // can be discovered by tools: for a given interface type a.b.C, it is + // expected that the declaration will be found in a protobuf file named + // a/b/interfaces.proto in the file descriptor set. + repeated InterfaceDescriptor declare_interface = 793021; + + // declare_scalar declares a scalar type to be used with + // the scalar field option. 
Scalar names are + // expected to follow the following convention such that their declaration + // can be discovered by tools: for a given scalar type a.b.C, it is + // expected that the declaration will be found in a protobuf file named + // a/b/scalars.proto in the file descriptor set. + repeated ScalarDescriptor declare_scalar = 793022; +} + +// InterfaceDescriptor describes an interface type to be used with +// accepts_interface and implements_interface and declared by declare_interface. +message InterfaceDescriptor { + + // name is the name of the interface. It should be a short-name (without + // a period) such that the fully qualified name of the interface will be + // package.name, ex. for the package a.b and interface named C, the + // fully-qualified name will be a.b.C. + string name = 1; + + // description is a human-readable description of the interface and its + // purpose. + string description = 2; +} + +// ScalarDescriptor describes an scalar type to be used with +// the scalar field option and declared by declare_scalar. +// Scalars extend simple protobuf built-in types with additional +// syntax and semantics, for instance to represent big integers. +// Scalars should ideally define an encoding such that there is only one +// valid syntactical representation for a given semantic meaning, +// i.e. the encoding should be deterministic. +message ScalarDescriptor { + + // name is the name of the scalar. It should be a short-name (without + // a period) such that the fully qualified name of the scalar will be + // package.name, ex. for the package a.b and scalar named C, the + // fully-qualified name will be a.b.C. + string name = 1; + + // description is a human-readable description of the scalar and its + // encoding format. For instance a big integer or decimal scalar should + // specify precisely the expected encoding format. + string description = 2; + + // field_type is the type of field with which this scalar can be used. + // Scalars can be used with one and only one type of field so that + // encoding standards and simple and clear. Currently only string and + // bytes fields are supported for scalars. + repeated ScalarType field_type = 3; +} + +enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0; + SCALAR_TYPE_STRING = 1; + SCALAR_TYPE_BYTES = 2; +} diff --git a/packages/codegen/proto/cosmwasm/LICENSE b/packages/codegen/proto/cosmwasm/LICENSE new file mode 100644 index 00000000..5a23302b --- /dev/null +++ b/packages/codegen/proto/cosmwasm/LICENSE @@ -0,0 +1,204 @@ +Cosmos-SDK +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 All in Bits, Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/packages/codegen/proto/cosmwasm/README.md b/packages/codegen/proto/cosmwasm/README.md new file mode 100644 index 00000000..63192e81 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/README.md @@ -0,0 +1 @@ +# cosmwasm \ No newline at end of file diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/authz.proto b/packages/codegen/proto/cosmwasm/wasm/v1/authz.proto new file mode 100644 index 00000000..6d6260c2 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/authz.proto @@ -0,0 +1,109 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_getters_all) = false; + +// ContractExecutionAuthorization defines authorization for wasm execute. +// Since: wasmd 0.30 +message ContractExecutionAuthorization { + option (cosmos_proto.implements_interface) = "cosmos.authz.Authorization"; + + // Grants for contract executions + repeated ContractGrant grants = 1 [ (gogoproto.nullable) = false ]; +} + +// ContractMigrationAuthorization defines authorization for wasm contract +// migration. Since: wasmd 0.30 +message ContractMigrationAuthorization { + option (cosmos_proto.implements_interface) = "cosmos.authz.Authorization"; + + // Grants for contract migrations + repeated ContractGrant grants = 1 [ (gogoproto.nullable) = false ]; +} + +// ContractGrant a granted permission for a single contract +// Since: wasmd 0.30 +message ContractGrant { + // Contract is the bech32 address of the smart contract + string contract = 1; + + // Limit defines execution limits that are enforced and updated when the grant + // is applied. When the limit lapsed the grant is removed. + google.protobuf.Any limit = 2 + [ (cosmos_proto.accepts_interface) = "ContractAuthzLimitX" ]; + + // Filter define more fine-grained control on the message payload passed + // to the contract in the operation. When no filter applies on execution, the + // operation is prohibited. + google.protobuf.Any filter = 3 + [ (cosmos_proto.accepts_interface) = "ContractAuthzFilterX" ]; +} + +// MaxCallsLimit limited number of calls to the contract. No funds transferable. +// Since: wasmd 0.30 +message MaxCallsLimit { + option (cosmos_proto.implements_interface) = "ContractAuthzLimitX"; + + // Remaining number that is decremented on each execution + uint64 remaining = 1; +} + +// MaxFundsLimit defines the maximal amounts that can be sent to the contract. +// Since: wasmd 0.30 +message MaxFundsLimit { + option (cosmos_proto.implements_interface) = "ContractAuthzLimitX"; + + // Amounts is the maximal amount of tokens transferable to the contract. + repeated cosmos.base.v1beta1.Coin amounts = 1 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// CombinedLimit defines the maximal amounts that can be sent to a contract and +// the maximal number of calls executable. Both need to remain >0 to be valid. +// Since: wasmd 0.30 +message CombinedLimit { + option (cosmos_proto.implements_interface) = "ContractAuthzLimitX"; + + // Remaining number that is decremented on each execution + uint64 calls_remaining = 1; + // Amounts is the maximal amount of tokens transferable to the contract. 
+ repeated cosmos.base.v1beta1.Coin amounts = 2 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// AllowAllMessagesFilter is a wildcard to allow any type of contract payload +// message. +// Since: wasmd 0.30 +message AllowAllMessagesFilter { + option (cosmos_proto.implements_interface) = "ContractAuthzFilterX"; +} + +// AcceptedMessageKeysFilter accept only the specific contract message keys in +// the json object to be executed. +// Since: wasmd 0.30 +message AcceptedMessageKeysFilter { + option (cosmos_proto.implements_interface) = "ContractAuthzFilterX"; + + // Messages is the list of unique keys + repeated string keys = 1; +} + +// AcceptedMessagesFilter accept only the specific raw contract messages to be +// executed. +// Since: wasmd 0.30 +message AcceptedMessagesFilter { + option (cosmos_proto.implements_interface) = "ContractAuthzFilterX"; + + // Messages is the list of raw contract messages + repeated bytes messages = 1 [ (gogoproto.casttype) = "RawContractMessage" ]; +} diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/genesis.proto b/packages/codegen/proto/cosmwasm/wasm/v1/genesis.proto new file mode 100644 index 00000000..4e728ff4 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/genesis.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "gogoproto/gogo.proto"; +import "cosmwasm/wasm/v1/types.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; + +// GenesisState - genesis state of x/wasm +message GenesisState { + Params params = 1 [ (gogoproto.nullable) = false ]; + repeated Code codes = 2 + [ (gogoproto.nullable) = false, (gogoproto.jsontag) = "codes,omitempty" ]; + repeated Contract contracts = 3 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "contracts,omitempty" + ]; + repeated Sequence sequences = 4 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "sequences,omitempty" + ]; +} + +// Code struct encompasses CodeInfo and CodeBytes +message Code { + uint64 code_id = 1 [ (gogoproto.customname) = "CodeID" ]; + CodeInfo code_info = 2 [ (gogoproto.nullable) = false ]; + bytes code_bytes = 3; + // Pinned to wasmvm cache + bool pinned = 4; +} + +// Contract struct encompasses ContractAddress, ContractInfo, and ContractState +message Contract { + string contract_address = 1; + ContractInfo contract_info = 2 [ (gogoproto.nullable) = false ]; + repeated Model contract_state = 3 [ (gogoproto.nullable) = false ]; + repeated ContractCodeHistoryEntry contract_code_history = 4 + [ (gogoproto.nullable) = false ]; +} + +// Sequence key and value of an id generation counter +message Sequence { + bytes id_key = 1 [ (gogoproto.customname) = "IDKey" ]; + uint64 value = 2; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/ibc.proto b/packages/codegen/proto/cosmwasm/wasm/v1/ibc.proto new file mode 100644 index 00000000..d880a707 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/ibc.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_getters_all) = false; + +// MsgIBCSend +message MsgIBCSend { + // the channel by which the packet will be sent + string channel = 2 [ (gogoproto.moretags) = "yaml:\"source_channel\"" ]; + + // Timeout height relative to the current block height. + // The timeout is disabled when set to 0. 
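The authorization messages above (ContractGrant with its ContractAuthzLimitX and ContractAuthzFilterX variants) describe when a grantee may execute a contract on the granter's behalf. Below is a minimal TypeScript sketch of that check, using hand-written local types that only approximate the shapes the codegen would emit; the real protos wrap limit and filter in google.protobuf.Any, which is replaced here by a tagged union purely for readability. Names and field casing are illustrative, not the generated API.

```ts
// Local stand-ins for cosmwasm.wasm.v1 authz messages (illustrative only).
interface Coin { denom: string; amount: string; }

type ContractAuthzLimit =
  | { kind: "maxCalls"; remaining: bigint }                        // MaxCallsLimit
  | { kind: "maxFunds"; amounts: Coin[] }                          // MaxFundsLimit
  | { kind: "combined"; callsRemaining: bigint; amounts: Coin[] }; // CombinedLimit

type ContractAuthzFilter =
  | { kind: "allowAll" }                                           // AllowAllMessagesFilter
  | { kind: "acceptedKeys"; keys: string[] };                      // AcceptedMessageKeysFilter

interface ContractGrant {
  contract: string;             // bech32 address of the smart contract
  limit: ContractAuthzLimit;
  filter: ContractAuthzFilter;
}

// Mirrors the documented semantics: the filter must accept the payload's
// top-level JSON keys, and any call-based limit must still have calls left.
function allowsExecution(grant: ContractGrant, executeMsg: Record<string, unknown>): boolean {
  let filterOk = true;
  if (grant.filter.kind === "acceptedKeys") {
    const allowed = grant.filter.keys;
    filterOk = Object.keys(executeMsg).every((k) => allowed.includes(k));
  }
  let limitOk = true;
  if (grant.limit.kind === "maxCalls") limitOk = grant.limit.remaining > 0n;
  if (grant.limit.kind === "combined") limitOk = grant.limit.callsRemaining > 0n;
  return filterOk && limitOk;
}
```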
+ uint64 timeout_height = 4 + [ (gogoproto.moretags) = "yaml:\"timeout_height\"" ]; + // Timeout timestamp (in nanoseconds) relative to the current block timestamp. + // The timeout is disabled when set to 0. + uint64 timeout_timestamp = 5 + [ (gogoproto.moretags) = "yaml:\"timeout_timestamp\"" ]; + + // Data is the payload to transfer. We must not make assumption what format or + // content is in here. + bytes data = 6; +} + +// MsgIBCCloseChannel port and channel need to be owned by the contract +message MsgIBCCloseChannel { + string channel = 2 [ (gogoproto.moretags) = "yaml:\"source_channel\"" ]; +} diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/proposal.proto b/packages/codegen/proto/cosmwasm/wasm/v1/proposal.proto new file mode 100644 index 00000000..013b4daf --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/proposal.proto @@ -0,0 +1,272 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "gogoproto/gogo.proto"; +import "cosmos_proto/cosmos.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "cosmwasm/wasm/v1/types.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_stringer_all) = false; +option (gogoproto.goproto_getters_all) = false; +option (gogoproto.equal_all) = true; + +// StoreCodeProposal gov proposal content type to submit WASM code to the system +message StoreCodeProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // RunAs is the address that is passed to the contract's environment as sender + string run_as = 3; + // WASMByteCode can be raw or gzip compressed + bytes wasm_byte_code = 4 [ (gogoproto.customname) = "WASMByteCode" ]; + // Used in v1beta1 + reserved 5, 6; + // InstantiatePermission to apply on contract creation, optional + AccessConfig instantiate_permission = 7; + // UnpinCode code on upload, optional + bool unpin_code = 8; + // Source is the URL where the code is hosted + string source = 9; + // Builder is the docker image used to build the code deterministically, used + // for smart contract verification + string builder = 10; + // CodeHash is the SHA256 sum of the code outputted by builder, used for smart + // contract verification + bytes code_hash = 11; +} + +// InstantiateContractProposal gov proposal content type to instantiate a +// contract. +message InstantiateContractProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // RunAs is the address that is passed to the contract's environment as sender + string run_as = 3; + // Admin is an optional address that can execute migrations + string admin = 4; + // CodeID is the reference to the stored WASM code + uint64 code_id = 5 [ (gogoproto.customname) = "CodeID" ]; + // Label is optional metadata to be stored with a constract instance. 
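The MsgIBCSend comments above state that both timeouts are relative values, the height in blocks and the timestamp in nanoseconds, with 0 disabling each. A small TypeScript sketch of building such a message follows; the interface is hand-written with camelCased field names and is not the generated type.

```ts
// Illustrative MsgIBCSend shape; timeouts are relative and 0 disables them.
const NS_PER_SECOND = 1_000_000_000n;

interface MsgIBCSend {
  channel: string;          // source channel the packet is sent on
  timeoutHeight: bigint;    // relative block height, 0 = disabled
  timeoutTimestamp: bigint; // relative nanoseconds, 0 = disabled
  data: Uint8Array;         // opaque packet payload, format left to the contract
}

function ibcSendWithTimeout(channel: string, payload: Uint8Array, timeoutSeconds: number): MsgIBCSend {
  return {
    channel,
    timeoutHeight: 0n, // rely only on the timestamp-based timeout here
    timeoutTimestamp: BigInt(timeoutSeconds) * NS_PER_SECOND,
    data: payload,
  };
}
```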
+ string label = 6; + // Msg json encoded message to be passed to the contract on instantiation + bytes msg = 7 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 8 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// InstantiateContract2Proposal gov proposal content type to instantiate +// contract 2 +message InstantiateContract2Proposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // RunAs is the address that is passed to the contract's enviroment as sender + string run_as = 3; + // Admin is an optional address that can execute migrations + string admin = 4; + // CodeID is the reference to the stored WASM code + uint64 code_id = 5 [ (gogoproto.customname) = "CodeID" ]; + // Label is optional metadata to be stored with a constract instance. + string label = 6; + // Msg json encode message to be passed to the contract on instantiation + bytes msg = 7 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 8 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; + // Salt is an arbitrary value provided by the sender. Size can be 1 to 64. + bytes salt = 9; + // FixMsg include the msg value into the hash for the predictable address. + // Default is false + bool fix_msg = 10; +} + +// MigrateContractProposal gov proposal content type to migrate a contract. +message MigrateContractProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // Note: skipping 3 as this was previously used for unneeded run_as + + // Contract is the address of the smart contract + string contract = 4; + // CodeID references the new WASM code + uint64 code_id = 5 [ (gogoproto.customname) = "CodeID" ]; + // Msg json encoded message to be passed to the contract on migration + bytes msg = 6 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// SudoContractProposal gov proposal content type to call sudo on a contract. +message SudoContractProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // Contract is the address of the smart contract + string contract = 3; + // Msg json encoded message to be passed to the contract as sudo + bytes msg = 4 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// ExecuteContractProposal gov proposal content type to call execute on a +// contract. 
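The gov proposal content types above all carry their contract message as RawContractMessage bytes holding JSON. A short sketch of assembling one of them, MigrateContractProposal, is shown below with a hand-written local interface; field names and the helper are illustrative, and the example migrate message is hypothetical.

```ts
// Local sketch of a MigrateContractProposal payload (not the generated type).
interface MigrateContractProposal {
  title: string;
  description: string;
  contract: string;   // address of the contract to migrate
  codeId: bigint;     // new code id to migrate to
  msg: Uint8Array;    // JSON-encoded migrate message, as RawContractMessage bytes
}

function migrateProposal(contract: string, codeId: bigint, migrateMsg: object): MigrateContractProposal {
  return {
    title: `Migrate ${contract} to code ${codeId}`,
    description: "Routine contract migration",
    contract,
    codeId,
    msg: new TextEncoder().encode(JSON.stringify(migrateMsg)),
  };
}
```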
+message ExecuteContractProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // RunAs is the address that is passed to the contract's environment as sender + string run_as = 3; + // Contract is the address of the smart contract + string contract = 4; + // Msg json encoded message to be passed to the contract as execute + bytes msg = 5 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 6 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// UpdateAdminProposal gov proposal content type to set an admin for a contract. +message UpdateAdminProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // NewAdmin address to be set + string new_admin = 3 [ (gogoproto.moretags) = "yaml:\"new_admin\"" ]; + // Contract is the address of the smart contract + string contract = 4; +} + +// ClearAdminProposal gov proposal content type to clear the admin of a +// contract. +message ClearAdminProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // Contract is the address of the smart contract + string contract = 3; +} + +// PinCodesProposal gov proposal content type to pin a set of code ids in the +// wasmvm cache. +message PinCodesProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1 [ (gogoproto.moretags) = "yaml:\"title\"" ]; + // Description is a human readable text + string description = 2 [ (gogoproto.moretags) = "yaml:\"description\"" ]; + // CodeIDs references the new WASM codes + repeated uint64 code_ids = 3 [ + (gogoproto.customname) = "CodeIDs", + (gogoproto.moretags) = "yaml:\"code_ids\"" + ]; +} + +// UnpinCodesProposal gov proposal content type to unpin a set of code ids in +// the wasmvm cache. +message UnpinCodesProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1 [ (gogoproto.moretags) = "yaml:\"title\"" ]; + // Description is a human readable text + string description = 2 [ (gogoproto.moretags) = "yaml:\"description\"" ]; + // CodeIDs references the WASM codes + repeated uint64 code_ids = 3 [ + (gogoproto.customname) = "CodeIDs", + (gogoproto.moretags) = "yaml:\"code_ids\"" + ]; +} + +// AccessConfigUpdate contains the code id and the access config to be +// applied. +message AccessConfigUpdate { + // CodeID is the reference to the stored WASM code to be updated + uint64 code_id = 1 [ (gogoproto.customname) = "CodeID" ]; + // InstantiatePermission to apply to the set of code ids + AccessConfig instantiate_permission = 2 [ (gogoproto.nullable) = false ]; +} + +// UpdateInstantiateConfigProposal gov proposal content type to update +// instantiate config to a set of code ids. 
+message UpdateInstantiateConfigProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1 [ (gogoproto.moretags) = "yaml:\"title\"" ]; + // Description is a human readable text + string description = 2 [ (gogoproto.moretags) = "yaml:\"description\"" ]; + // AccessConfigUpdate contains the list of code ids and the access config + // to be applied. + repeated AccessConfigUpdate access_config_updates = 3 + [ (gogoproto.nullable) = false ]; +} + +// StoreAndInstantiateContractProposal gov proposal content type to store +// and instantiate the contract. +message StoreAndInstantiateContractProposal { + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // Title is a short summary + string title = 1; + // Description is a human readable text + string description = 2; + // RunAs is the address that is passed to the contract's environment as sender + string run_as = 3; + // WASMByteCode can be raw or gzip compressed + bytes wasm_byte_code = 4 [ (gogoproto.customname) = "WASMByteCode" ]; + // InstantiatePermission to apply on contract creation, optional + AccessConfig instantiate_permission = 5; + // UnpinCode code on upload, optional + bool unpin_code = 6; + // Admin is an optional address that can execute migrations + string admin = 7; + // Label is optional metadata to be stored with a constract instance. + string label = 8; + // Msg json encoded message to be passed to the contract on instantiation + bytes msg = 9 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 10 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; + // Source is the URL where the code is hosted + string source = 11; + // Builder is the docker image used to build the code deterministically, used + // for smart contract verification + string builder = 12; + // CodeHash is the SHA256 sum of the code outputted by builder, used for smart + // contract verification + bytes code_hash = 13; +} diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/query.proto b/packages/codegen/proto/cosmwasm/wasm/v1/query.proto new file mode 100644 index 00000000..ffe48d24 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/query.proto @@ -0,0 +1,263 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "gogoproto/gogo.proto"; +import "cosmwasm/wasm/v1/types.proto"; +import "google/api/annotations.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_getters_all) = false; +option (gogoproto.equal_all) = false; + +// Query provides defines the gRPC querier service +service Query { + // ContractInfo gets the contract meta data + rpc ContractInfo(QueryContractInfoRequest) + returns (QueryContractInfoResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/contract/{address}"; + } + // ContractHistory gets the contract code history + rpc ContractHistory(QueryContractHistoryRequest) + returns (QueryContractHistoryResponse) { + option (google.api.http).get = + "/cosmwasm/wasm/v1/contract/{address}/history"; + } + // ContractsByCode lists all smart contracts for a code id + rpc ContractsByCode(QueryContractsByCodeRequest) + returns (QueryContractsByCodeResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/code/{code_id}/contracts"; + } + // 
AllContractState gets all raw store data for a single contract + rpc AllContractState(QueryAllContractStateRequest) + returns (QueryAllContractStateResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/contract/{address}/state"; + } + // RawContractState gets single key from the raw store data of a contract + rpc RawContractState(QueryRawContractStateRequest) + returns (QueryRawContractStateResponse) { + option (google.api.http).get = + "/cosmwasm/wasm/v1/contract/{address}/raw/{query_data}"; + } + // SmartContractState get smart query result from the contract + rpc SmartContractState(QuerySmartContractStateRequest) + returns (QuerySmartContractStateResponse) { + option (google.api.http).get = + "/cosmwasm/wasm/v1/contract/{address}/smart/{query_data}"; + } + // Code gets the binary code and metadata for a singe wasm code + rpc Code(QueryCodeRequest) returns (QueryCodeResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/code/{code_id}"; + } + // Codes gets the metadata for all stored wasm codes + rpc Codes(QueryCodesRequest) returns (QueryCodesResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/code"; + } + + // PinnedCodes gets the pinned code ids + rpc PinnedCodes(QueryPinnedCodesRequest) returns (QueryPinnedCodesResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/codes/pinned"; + } + + // Params gets the module params + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/cosmwasm/wasm/v1/codes/params"; + } + + // ContractsByCreator gets the contracts by creator + rpc ContractsByCreator(QueryContractsByCreatorRequest) + returns (QueryContractsByCreatorResponse) { + option (google.api.http).get = + "/cosmwasm/wasm/v1/contracts/creator/{creator_address}"; + } +} + +// QueryContractInfoRequest is the request type for the Query/ContractInfo RPC +// method +message QueryContractInfoRequest { + // address is the address of the contract to query + string address = 1; +} +// QueryContractInfoResponse is the response type for the Query/ContractInfo RPC +// method +message QueryContractInfoResponse { + option (gogoproto.equal) = true; + + // address is the address of the contract + string address = 1; + ContractInfo contract_info = 2 [ + (gogoproto.embed) = true, + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "" + ]; +} + +// QueryContractHistoryRequest is the request type for the Query/ContractHistory +// RPC method +message QueryContractHistoryRequest { + // address is the address of the contract to query + string address = 1; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryContractHistoryResponse is the response type for the +// Query/ContractHistory RPC method +message QueryContractHistoryResponse { + repeated ContractCodeHistoryEntry entries = 1 + [ (gogoproto.nullable) = false ]; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryContractsByCodeRequest is the request type for the Query/ContractsByCode +// RPC method +message QueryContractsByCodeRequest { + uint64 code_id = 1; // grpc-gateway_out does not support Go style CodID + // pagination defines an optional pagination for the request. 
+ cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryContractsByCodeResponse is the response type for the +// Query/ContractsByCode RPC method +message QueryContractsByCodeResponse { + // contracts are a set of contract addresses + repeated string contracts = 1; + + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryAllContractStateRequest is the request type for the +// Query/AllContractState RPC method +message QueryAllContractStateRequest { + // address is the address of the contract + string address = 1; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryAllContractStateResponse is the response type for the +// Query/AllContractState RPC method +message QueryAllContractStateResponse { + repeated Model models = 1 [ (gogoproto.nullable) = false ]; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryRawContractStateRequest is the request type for the +// Query/RawContractState RPC method +message QueryRawContractStateRequest { + // address is the address of the contract + string address = 1; + bytes query_data = 2; +} + +// QueryRawContractStateResponse is the response type for the +// Query/RawContractState RPC method +message QueryRawContractStateResponse { + // Data contains the raw store data + bytes data = 1; +} + +// QuerySmartContractStateRequest is the request type for the +// Query/SmartContractState RPC method +message QuerySmartContractStateRequest { + // address is the address of the contract + string address = 1; + // QueryData contains the query data passed to the contract + bytes query_data = 2 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// QuerySmartContractStateResponse is the response type for the +// Query/SmartContractState RPC method +message QuerySmartContractStateResponse { + // Data contains the json data returned from the smart contract + bytes data = 1 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// QueryCodeRequest is the request type for the Query/Code RPC method +message QueryCodeRequest { + uint64 code_id = 1; // grpc-gateway_out does not support Go style CodID +} + +// CodeInfoResponse contains code meta data from CodeInfo +message CodeInfoResponse { + option (gogoproto.equal) = true; + + uint64 code_id = 1 [ + (gogoproto.customname) = "CodeID", + (gogoproto.jsontag) = "id" + ]; // id for legacy support + string creator = 2; + bytes data_hash = 3 + [ (gogoproto.casttype) = + "github.com/tendermint/tendermint/libs/bytes.HexBytes" ]; + // Used in v1beta1 + reserved 4, 5; + AccessConfig instantiate_permission = 6 [ (gogoproto.nullable) = false ]; +} + +// QueryCodeResponse is the response type for the Query/Code RPC method +message QueryCodeResponse { + option (gogoproto.equal) = true; + CodeInfoResponse code_info = 1 + [ (gogoproto.embed) = true, (gogoproto.jsontag) = "" ]; + bytes data = 2 [ (gogoproto.jsontag) = "data" ]; +} + +// QueryCodesRequest is the request type for the Query/Codes RPC method +message QueryCodesRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryCodesResponse is the response type for the Query/Codes RPC method +message QueryCodesResponse { + repeated CodeInfoResponse code_infos = 1 [ (gogoproto.nullable) = false ]; + // pagination defines the pagination in the response. 
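Most list queries in this service take a cosmos.base.query.v1beta1.PageRequest and return a PageResponse; paging through all results means feeding the response's next_key back in as the next request's key until it comes back empty. The TypeScript sketch below shows that loop against a caller-supplied fetch function, with local interfaces covering only a subset of the real PageRequest/PageResponse fields; it does not assume any particular generated client.

```ts
// Minimal local shapes for the pagination types (subset of the real fields).
interface PageRequest { key?: Uint8Array; limit?: bigint; reverse?: boolean; }
interface PageResponse { nextKey?: Uint8Array; total?: bigint; }

async function fetchAllPages<T>(
  // `fetchPage` stands in for any list query, e.g. ContractsByCode or Codes.
  fetchPage: (page: PageRequest) => Promise<{ items: T[]; pagination?: PageResponse }>
): Promise<T[]> {
  const all: T[] = [];
  let key: Uint8Array | undefined = undefined;
  do {
    const { items, pagination } = await fetchPage({ key, limit: 100n });
    all.push(...items);
    const next = pagination?.nextKey;
    key = next && next.length > 0 ? next : undefined; // empty next_key = last page
  } while (key !== undefined);
  return all;
}
```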
+ cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryPinnedCodesRequest is the request type for the Query/PinnedCodes +// RPC method +message QueryPinnedCodesRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryPinnedCodesResponse is the response type for the +// Query/PinnedCodes RPC method +message QueryPinnedCodesResponse { + repeated uint64 code_ids = 1 + [ (gogoproto.nullable) = false, (gogoproto.customname) = "CodeIDs" ]; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [ (gogoproto.nullable) = false ]; +} + +// QueryContractsByCreatorRequest is the request type for the +// Query/ContractsByCreator RPC method. +message QueryContractsByCreatorRequest { + // CreatorAddress is the address of contract creator + string creator_address = 1; + // Pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryContractsByCreatorResponse is the response type for the +// Query/ContractsByCreator RPC method. +message QueryContractsByCreatorResponse { + // ContractAddresses result set + repeated string contract_addresses = 1; + // Pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} \ No newline at end of file diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/tx.proto b/packages/codegen/proto/cosmwasm/wasm/v1/tx.proto new file mode 100644 index 00000000..04acc8ef --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/tx.proto @@ -0,0 +1,176 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "cosmos/base/v1beta1/coin.proto"; +import "gogoproto/gogo.proto"; +import "cosmwasm/wasm/v1/types.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_getters_all) = false; + +// Msg defines the wasm Msg service. +service Msg { + // StoreCode to submit Wasm code to the system + rpc StoreCode(MsgStoreCode) returns (MsgStoreCodeResponse); + // InstantiateContract creates a new smart contract instance for the given + // code id. 
+ rpc InstantiateContract(MsgInstantiateContract) + returns (MsgInstantiateContractResponse); + // InstantiateContract2 creates a new smart contract instance for the given + // code id with a predictable address + rpc InstantiateContract2(MsgInstantiateContract2) + returns (MsgInstantiateContract2Response); + // Execute submits the given message data to a smart contract + rpc ExecuteContract(MsgExecuteContract) returns (MsgExecuteContractResponse); + // Migrate runs a code upgrade/ downgrade for a smart contract + rpc MigrateContract(MsgMigrateContract) returns (MsgMigrateContractResponse); + // UpdateAdmin sets a new admin for a smart contract + rpc UpdateAdmin(MsgUpdateAdmin) returns (MsgUpdateAdminResponse); + // ClearAdmin removes any admin stored for a smart contract + rpc ClearAdmin(MsgClearAdmin) returns (MsgClearAdminResponse); +} + +// MsgStoreCode submit Wasm code to the system +message MsgStoreCode { + // Sender is the that actor that signed the messages + string sender = 1; + // WASMByteCode can be raw or gzip compressed + bytes wasm_byte_code = 2 [ (gogoproto.customname) = "WASMByteCode" ]; + // Used in v1beta1 + reserved 3, 4; + // InstantiatePermission access control to apply on contract creation, + // optional + AccessConfig instantiate_permission = 5; +} +// MsgStoreCodeResponse returns store result data. +message MsgStoreCodeResponse { + // CodeID is the reference to the stored WASM code + uint64 code_id = 1 [ (gogoproto.customname) = "CodeID" ]; + // Checksum is the sha256 hash of the stored code + bytes checksum = 2; +} + +// MsgInstantiateContract create a new smart contract instance for the given +// code id. +message MsgInstantiateContract { + // Sender is the that actor that signed the messages + string sender = 1; + // Admin is an optional address that can execute migrations + string admin = 2; + // CodeID is the reference to the stored WASM code + uint64 code_id = 3 [ (gogoproto.customname) = "CodeID" ]; + // Label is optional metadata to be stored with a contract instance. + string label = 4; + // Msg json encoded message to be passed to the contract on instantiation + bytes msg = 5 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 6 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// MsgInstantiateContract2 create a new smart contract instance for the given +// code id with a predicable address. +message MsgInstantiateContract2 { + // Sender is the that actor that signed the messages + string sender = 1; + // Admin is an optional address that can execute migrations + string admin = 2; + // CodeID is the reference to the stored WASM code + uint64 code_id = 3 [ (gogoproto.customname) = "CodeID" ]; + // Label is optional metadata to be stored with a contract instance. + string label = 4; + // Msg json encoded message to be passed to the contract on instantiation + bytes msg = 5 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on instantiation + repeated cosmos.base.v1beta1.Coin funds = 6 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; + // Salt is an arbitrary value provided by the sender. Size can be 1 to 64. + bytes salt = 7; + // FixMsg include the msg value into the hash for the predictable address. 
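MsgInstantiateContract2, defined above, adds a sender-chosen salt (1 to 64 bytes) for predictable contract addresses, and fix_msg optionally mixes the instantiate msg into that derivation as well. A hand-written TypeScript sketch of assembling the message follows; the interface, camelCased field names, and the bech32 address are illustrative placeholders, not the generated API.

```ts
// Illustrative MsgInstantiateContract2 shape and example value.
interface Coin { denom: string; amount: string; }

interface MsgInstantiateContract2 {
  sender: string;
  admin: string;        // empty string = no admin, migrations disabled
  codeId: bigint;
  label: string;
  msg: Uint8Array;      // JSON instantiate message as RawContractMessage bytes
  funds: Coin[];
  salt: Uint8Array;     // arbitrary sender-chosen value, 1 to 64 bytes
  fixMsg: boolean;      // include msg in the predictable-address hash (default false)
}

const instantiate2: MsgInstantiateContract2 = {
  sender: "wasm1...",   // hypothetical bech32 address
  admin: "",
  codeId: 42n,
  label: "counter-v1",
  msg: new TextEncoder().encode(JSON.stringify({ count: 0 })),
  funds: [],
  salt: new TextEncoder().encode("deployment-1"),
  fixMsg: false,
};
```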
+ // Default is false + bool fix_msg = 8; +} + +// MsgInstantiateContractResponse return instantiation result data +message MsgInstantiateContractResponse { + // Address is the bech32 address of the new contract instance. + string address = 1; + // Data contains bytes to returned from the contract + bytes data = 2; +} + +// MsgInstantiateContract2Response return instantiation result data +message MsgInstantiateContract2Response { + // Address is the bech32 address of the new contract instance. + string address = 1; + // Data contains bytes to returned from the contract + bytes data = 2; +} + +// MsgExecuteContract submits the given message data to a smart contract +message MsgExecuteContract { + // Sender is the that actor that signed the messages + string sender = 1; + // Contract is the address of the smart contract + string contract = 2; + // Msg json encoded message to be passed to the contract + bytes msg = 3 [ (gogoproto.casttype) = "RawContractMessage" ]; + // Funds coins that are transferred to the contract on execution + repeated cosmos.base.v1beta1.Coin funds = 5 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +// MsgExecuteContractResponse returns execution result data. +message MsgExecuteContractResponse { + // Data contains bytes to returned from the contract + bytes data = 1; +} + +// MsgMigrateContract runs a code upgrade/ downgrade for a smart contract +message MsgMigrateContract { + // Sender is the that actor that signed the messages + string sender = 1; + // Contract is the address of the smart contract + string contract = 2; + // CodeID references the new WASM code + uint64 code_id = 3 [ (gogoproto.customname) = "CodeID" ]; + // Msg json encoded message to be passed to the contract on migration + bytes msg = 4 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// MsgMigrateContractResponse returns contract migration result data. +message MsgMigrateContractResponse { + // Data contains same raw bytes returned as data from the wasm contract. 
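MsgExecuteContract above carries the execute payload as JSON bytes plus optional funds. The sketch below mirrors that proto shape with a small hand-written helper; the addresses and the `increment` handler are hypothetical, and nothing here is the generated client API.

```ts
// Local sketch of a MsgExecuteContract payload.
interface Coin { denom: string; amount: string; }

interface MsgExecuteContract {
  sender: string;
  contract: string;
  msg: Uint8Array;   // RawContractMessage: JSON-encoded execute message
  funds: Coin[];     // coins transferred to the contract on execution
}

function executeMsg(sender: string, contract: string, body: object, funds: Coin[] = []): MsgExecuteContract {
  return { sender, contract, msg: new TextEncoder().encode(JSON.stringify(body)), funds };
}

// Example: call a hypothetical `increment` handler with 1000 ustake attached.
const msg = executeMsg("wasm1sender...", "wasm1contract...", { increment: {} }, [
  { denom: "ustake", amount: "1000" },
]);
```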
+ // (May be empty) + bytes data = 1; +} + +// MsgUpdateAdmin sets a new admin for a smart contract +message MsgUpdateAdmin { + // Sender is the that actor that signed the messages + string sender = 1; + // NewAdmin address to be set + string new_admin = 2; + // Contract is the address of the smart contract + string contract = 3; +} + +// MsgUpdateAdminResponse returns empty data +message MsgUpdateAdminResponse {} + +// MsgClearAdmin removes any admin stored for a smart contract +message MsgClearAdmin { + // Sender is the that actor that signed the messages + string sender = 1; + // Contract is the address of the smart contract + string contract = 3; +} + +// MsgClearAdminResponse returns empty data +message MsgClearAdminResponse {} diff --git a/packages/codegen/proto/cosmwasm/wasm/v1/types.proto b/packages/codegen/proto/cosmwasm/wasm/v1/types.proto new file mode 100644 index 00000000..216b24e3 --- /dev/null +++ b/packages/codegen/proto/cosmwasm/wasm/v1/types.proto @@ -0,0 +1,144 @@ +syntax = "proto3"; +package cosmwasm.wasm.v1; + +import "cosmos_proto/cosmos.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +option go_package = "github.com/CosmWasm/wasmd/x/wasm/types"; +option (gogoproto.goproto_getters_all) = false; +option (gogoproto.equal_all) = true; + +// AccessType permission types +enum AccessType { + option (gogoproto.goproto_enum_prefix) = false; + option (gogoproto.goproto_enum_stringer) = false; + // AccessTypeUnspecified placeholder for empty value + ACCESS_TYPE_UNSPECIFIED = 0 + [ (gogoproto.enumvalue_customname) = "AccessTypeUnspecified" ]; + // AccessTypeNobody forbidden + ACCESS_TYPE_NOBODY = 1 + [ (gogoproto.enumvalue_customname) = "AccessTypeNobody" ]; + // AccessTypeOnlyAddress restricted to a single address + // Deprecated: use AccessTypeAnyOfAddresses instead + ACCESS_TYPE_ONLY_ADDRESS = 2 + [ (gogoproto.enumvalue_customname) = "AccessTypeOnlyAddress" ]; + // AccessTypeEverybody unrestricted + ACCESS_TYPE_EVERYBODY = 3 + [ (gogoproto.enumvalue_customname) = "AccessTypeEverybody" ]; + // AccessTypeAnyOfAddresses allow any of the addresses + ACCESS_TYPE_ANY_OF_ADDRESSES = 4 + [ (gogoproto.enumvalue_customname) = "AccessTypeAnyOfAddresses" ]; +} + +// AccessTypeParam +message AccessTypeParam { + option (gogoproto.goproto_stringer) = true; + AccessType value = 1 [ (gogoproto.moretags) = "yaml:\"value\"" ]; +} + +// AccessConfig access control type. +message AccessConfig { + option (gogoproto.goproto_stringer) = true; + AccessType permission = 1 [ (gogoproto.moretags) = "yaml:\"permission\"" ]; + + // Address + // Deprecated: replaced by addresses + string address = 2 [ (gogoproto.moretags) = "yaml:\"address\"" ]; + repeated string addresses = 3 [ (gogoproto.moretags) = "yaml:\"addresses\"" ]; +} + +// Params defines the set of wasm parameters. 
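The AccessType enum and AccessConfig message above encode who may act (for example, who may instantiate a code id). A small TypeScript sketch of that check, following the enum comments (Nobody forbids everyone, Everybody allows anyone, OnlyAddress is deprecated in favour of AnyOfAddresses), is given below using local types rather than the generated ones.

```ts
// Local mirror of cosmwasm.wasm.v1 AccessType / AccessConfig (illustrative).
enum AccessType {
  Unspecified = 0,
  Nobody = 1,
  OnlyAddress = 2,      // deprecated: use AnyOfAddresses
  Everybody = 3,
  AnyOfAddresses = 4,
}

interface AccessConfig {
  permission: AccessType;
  address: string;      // deprecated: replaced by addresses
  addresses: string[];
}

function mayAct(config: AccessConfig, actor: string): boolean {
  switch (config.permission) {
    case AccessType.Everybody:      return true;
    case AccessType.OnlyAddress:    return config.address === actor;
    case AccessType.AnyOfAddresses: return config.addresses.includes(actor);
    default:                        return false; // Nobody / Unspecified
  }
}
```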
+message Params { + option (gogoproto.goproto_stringer) = false; + AccessConfig code_upload_access = 1 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"code_upload_access\"" + ]; + AccessType instantiate_default_permission = 2 + [ (gogoproto.moretags) = "yaml:\"instantiate_default_permission\"" ]; +} + +// CodeInfo is data for the uploaded contract WASM code +message CodeInfo { + // CodeHash is the unique identifier created by wasmvm + bytes code_hash = 1; + // Creator address who initially stored the code + string creator = 2; + // Used in v1beta1 + reserved 3, 4; + // InstantiateConfig access control to apply on contract creation, optional + AccessConfig instantiate_config = 5 [ (gogoproto.nullable) = false ]; +} + +// ContractInfo stores a WASM contract instance +message ContractInfo { + option (gogoproto.equal) = true; + + // CodeID is the reference to the stored Wasm code + uint64 code_id = 1 [ (gogoproto.customname) = "CodeID" ]; + // Creator address who initially instantiated the contract + string creator = 2; + // Admin is an optional address that can execute migrations + string admin = 3; + // Label is optional metadata to be stored with a contract instance. + string label = 4; + // Created Tx position when the contract was instantiated. + AbsoluteTxPosition created = 5; + string ibc_port_id = 6 [ (gogoproto.customname) = "IBCPortID" ]; + + // Extension is an extension point to store custom metadata within the + // persistence model. + google.protobuf.Any extension = 7 + [ (cosmos_proto.accepts_interface) = "ContractInfoExtension" ]; +} + +// ContractCodeHistoryOperationType actions that caused a code change +enum ContractCodeHistoryOperationType { + option (gogoproto.goproto_enum_prefix) = false; + // ContractCodeHistoryOperationTypeUnspecified placeholder for empty value + CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED = 0 + [ (gogoproto.enumvalue_customname) = + "ContractCodeHistoryOperationTypeUnspecified" ]; + // ContractCodeHistoryOperationTypeInit on chain contract instantiation + CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT = 1 + [ (gogoproto.enumvalue_customname) = + "ContractCodeHistoryOperationTypeInit" ]; + // ContractCodeHistoryOperationTypeMigrate code migration + CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE = 2 + [ (gogoproto.enumvalue_customname) = + "ContractCodeHistoryOperationTypeMigrate" ]; + // ContractCodeHistoryOperationTypeGenesis based on genesis data + CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS = 3 + [ (gogoproto.enumvalue_customname) = + "ContractCodeHistoryOperationTypeGenesis" ]; +} + +// ContractCodeHistoryEntry metadata to a contract. +message ContractCodeHistoryEntry { + ContractCodeHistoryOperationType operation = 1; + // CodeID is the reference to the stored WASM code + uint64 code_id = 2 [ (gogoproto.customname) = "CodeID" ]; + // Updated Tx position when the operation was executed. + AbsoluteTxPosition updated = 3; + bytes msg = 4 [ (gogoproto.casttype) = "RawContractMessage" ]; +} + +// AbsoluteTxPosition is a unique transaction position that allows for global +// ordering of transactions. 
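The comment above describes AbsoluteTxPosition (defined next, with block_height and tx_index fields) as a globally ordered transaction position. A minimal sketch of that ordering is a lexicographic comparison on the pair, shown here with a hand-written local type; it can be used, for example, to sort a contract's code history entries chronologically.

```ts
// Illustrative ordering over AbsoluteTxPosition: block height first, then tx index.
interface AbsoluteTxPosition { blockHeight: bigint; txIndex: bigint; }

function comparePositions(a: AbsoluteTxPosition, b: AbsoluteTxPosition): number {
  if (a.blockHeight !== b.blockHeight) return a.blockHeight < b.blockHeight ? -1 : 1;
  if (a.txIndex !== b.txIndex) return a.txIndex < b.txIndex ? -1 : 1;
  return 0;
}

// e.g. history.sort((x, y) => comparePositions(x.updated, y.updated));
```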
+message AbsoluteTxPosition { + // BlockHeight is the block the contract was created at + uint64 block_height = 1; + // TxIndex is a monotonic counter within the block (actual transaction index, + // or gas consumed) + uint64 tx_index = 2; +} + +// Model is a struct that holds a KV pair +message Model { + // hex-encode key to read it better (this is often ascii) + bytes key = 1 [ (gogoproto.casttype) = + "github.com/tendermint/tendermint/libs/bytes.HexBytes" ]; + // base64-encode raw value + bytes value = 2; +} diff --git a/packages/codegen/proto/gogoproto/LICENSE b/packages/codegen/proto/gogoproto/LICENSE new file mode 100644 index 00000000..992eb2bd --- /dev/null +++ b/packages/codegen/proto/gogoproto/LICENSE @@ -0,0 +1,34 @@ +Copyright (c) 2013, The GoGo Authors. All rights reserved. + +Protocol Buffers for Go with Gadgets + +Go support for Protocol Buffers - Google's data interchange format + +Copyright 2010 The Go Authors. All rights reserved. +https://github.com/golang/protobuf + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/packages/codegen/proto/gogoproto/README.md b/packages/codegen/proto/gogoproto/README.md new file mode 100644 index 00000000..4cfc4768 --- /dev/null +++ b/packages/codegen/proto/gogoproto/README.md @@ -0,0 +1 @@ +# gogoproto \ No newline at end of file diff --git a/packages/codegen/proto/gogoproto/gogo.proto b/packages/codegen/proto/gogoproto/gogo.proto new file mode 100644 index 00000000..49e78f99 --- /dev/null +++ b/packages/codegen/proto/gogoproto/gogo.proto @@ -0,0 +1,145 @@ +// Protocol Buffers for Go with Gadgets +// +// Copyright (c) 2013, The GoGo Authors. All rights reserved. +// http://github.com/gogo/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; +package gogoproto; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "GoGoProtos"; +option go_package = "github.com/gogo/protobuf/gogoproto"; + +extend google.protobuf.EnumOptions { + optional bool goproto_enum_prefix = 62001; + optional bool goproto_enum_stringer = 62021; + optional bool enum_stringer = 62022; + optional string enum_customname = 62023; + optional bool enumdecl = 62024; +} + +extend google.protobuf.EnumValueOptions { + optional string enumvalue_customname = 66001; +} + +extend google.protobuf.FileOptions { + optional bool goproto_getters_all = 63001; + optional bool goproto_enum_prefix_all = 63002; + optional bool goproto_stringer_all = 63003; + optional bool verbose_equal_all = 63004; + optional bool face_all = 63005; + optional bool gostring_all = 63006; + optional bool populate_all = 63007; + optional bool stringer_all = 63008; + optional bool onlyone_all = 63009; + + optional bool equal_all = 63013; + optional bool description_all = 63014; + optional bool testgen_all = 63015; + optional bool benchgen_all = 63016; + optional bool marshaler_all = 63017; + optional bool unmarshaler_all = 63018; + optional bool stable_marshaler_all = 63019; + + optional bool sizer_all = 63020; + + optional bool goproto_enum_stringer_all = 63021; + optional bool enum_stringer_all = 63022; + + optional bool unsafe_marshaler_all = 63023; + optional bool unsafe_unmarshaler_all = 63024; + + optional bool goproto_extensions_map_all = 63025; + optional bool goproto_unrecognized_all = 63026; + optional bool gogoproto_import = 63027; + optional bool protosizer_all = 63028; + optional bool compare_all = 63029; + optional bool typedecl_all = 63030; + optional bool enumdecl_all = 63031; + + optional bool goproto_registration = 63032; + optional bool messagename_all = 63033; + + optional bool goproto_sizecache_all = 63034; + optional bool goproto_unkeyed_all = 63035; +} + +extend google.protobuf.MessageOptions { + optional bool goproto_getters = 64001; + optional bool goproto_stringer = 64003; + optional bool verbose_equal = 64004; + optional bool face = 64005; + optional bool gostring = 64006; + optional bool populate = 64007; + optional bool stringer = 67008; + optional bool onlyone = 64009; + + optional bool equal = 64013; + optional bool description = 64014; + optional bool testgen = 64015; + optional bool benchgen = 64016; + optional bool marshaler = 64017; + optional bool unmarshaler = 64018; + 
optional bool stable_marshaler = 64019; + + optional bool sizer = 64020; + + optional bool unsafe_marshaler = 64023; + optional bool unsafe_unmarshaler = 64024; + + optional bool goproto_extensions_map = 64025; + optional bool goproto_unrecognized = 64026; + + optional bool protosizer = 64028; + optional bool compare = 64029; + + optional bool typedecl = 64030; + + optional bool messagename = 64033; + + optional bool goproto_sizecache = 64034; + optional bool goproto_unkeyed = 64035; +} + +extend google.protobuf.FieldOptions { + optional bool nullable = 65001; + optional bool embed = 65002; + optional string customtype = 65003; + optional string customname = 65004; + optional string jsontag = 65005; + optional string moretags = 65006; + optional string casttype = 65007; + optional string castkey = 65008; + optional string castvalue = 65009; + + optional bool stdtime = 65010; + optional bool stdduration = 65011; + optional bool wktpointer = 65012; + + optional string castrepeated = 65013; +} diff --git a/packages/codegen/proto/google/LICENSE b/packages/codegen/proto/google/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/packages/codegen/proto/google/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/codegen/proto/google/README.md b/packages/codegen/proto/google/README.md new file mode 100644 index 00000000..3bdc1f83 --- /dev/null +++ b/packages/codegen/proto/google/README.md @@ -0,0 +1 @@ +# google \ No newline at end of file diff --git a/packages/codegen/proto/google/api/annotations.proto b/packages/codegen/proto/google/api/annotations.proto new file mode 100644 index 00000000..efdab3db --- /dev/null +++ b/packages/codegen/proto/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/packages/codegen/proto/google/api/http.proto b/packages/codegen/proto/google/api/http.proto new file mode 100644 index 00000000..113fa936 --- /dev/null +++ b/packages/codegen/proto/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. 
The path
+// template may refer to one or more fields in the gRPC request message, as long
+// as each field is a non-repeated field with a primitive (non-message) type.
+// The path template controls how fields of the request message are mapped to
+// the URL path.
+//
+// Example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get: "/v1/{name=messages/*}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// string name = 1; // Mapped to URL path.
+// }
+// message Message {
+// string text = 1; // The resource content.
+// }
+//
+// This enables an HTTP REST to gRPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")`
+//
+// Any fields in the request message which are not bound by the path template
+// automatically become HTTP query parameters if there is no HTTP request body.
+// For example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get:"/v1/messages/{message_id}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// message SubMessage {
+// string subfield = 1;
+// }
+// string message_id = 1; // Mapped to URL path.
+// int64 revision = 2; // Mapped to URL query parameter `revision`.
+// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`.
+// }
+//
+// This enables a HTTP JSON to RPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
+// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
+// "foo"))`
+//
+// Note that fields which are mapped to URL query parameters must have a
+// primitive type or a repeated primitive type or a non-repeated message type.
+// In the case of a repeated type, the parameter can be repeated in the URL
+// as `...?param=A&param=B`. In the case of a message type, each field of the
+// message is mapped to a separate parameter, such as
+// `...?foo.a=A&foo.b=B&foo.c=C`.
+//
+// For HTTP methods that allow a request body, the `body` field
+// specifies the mapping. Consider a REST update method on the
+// message resource collection:
+//
+// service Messaging {
+// rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// patch: "/v1/messages/{message_id}"
+// body: "message"
+// };
+// }
+// }
+// message UpdateMessageRequest {
+// string message_id = 1; // mapped to the URL
+// Message message = 2; // mapped to the body
+// }
+//
+// The following HTTP JSON to RPC mapping is enabled, where the
+// representation of the JSON in the request body is determined by
+// protos JSON encoding:
+//
+// HTTP | gRPC
+// -----|-----
+// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
+// "123456" message { text: "Hi!" })`
+//
+// The special name `*` can be used in the body mapping to define that
+// every field not bound by the path template should be mapped to the
+// request body. This enables the following alternative definition of
+// the update method:
+//
+// service Messaging {
+// rpc UpdateMessage(Message) returns (Message) {
+// option (google.api.http) = {
+// patch: "/v1/messages/{message_id}"
+// body: "*"
+// };
+// }
+// }
+// message Message {
+// string message_id = 1;
+// string text = 2;
+// }
+//
+//
+// The following HTTP JSON to RPC mapping is enabled:
+//
+// HTTP | gRPC
+// -----|-----
+// `PATCH /v1/messages/123456 { "text": "Hi!"
}` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. 
+// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. 
+ string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/packages/codegen/proto/google/protobuf/any.proto b/packages/codegen/proto/google/protobuf/any.proto new file mode 100644 index 00000000..4cf3843b --- /dev/null +++ b/packages/codegen/proto/google/protobuf/any.proto @@ -0,0 +1,155 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package google.protobuf;
+
+option csharp_namespace = "Google.Protobuf.WellKnownTypes";
+option go_package = "types";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "AnyProto";
+option java_multiple_files = true;
+option objc_class_prefix = "GPB";
+
+// `Any` contains an arbitrary serialized protocol buffer message along with a
+// URL that describes the type of the serialized message.
+//
+// Protobuf library provides support to pack/unpack Any values in the form
+// of utility functions or additional generated methods of the Any type.
+//
+// Example 1: Pack and unpack a message in C++.
+//
+// Foo foo = ...;
+// Any any;
+// any.PackFrom(foo);
+// ...
+// if (any.UnpackTo(&foo)) {
+// ...
+// }
+//
+// Example 2: Pack and unpack a message in Java.
+//
+// Foo foo = ...;
+// Any any = Any.pack(foo);
+// ...
+// if (any.is(Foo.class)) {
+// foo = any.unpack(Foo.class);
+// }
+//
+// Example 3: Pack and unpack a message in Python.
+//
+// foo = Foo(...)
+// any = Any()
+// any.Pack(foo)
+// ...
+// if any.Is(Foo.DESCRIPTOR):
+// any.Unpack(foo)
+// ...
+//
+// Example 4: Pack and unpack a message in Go
+//
+// foo := &pb.Foo{...}
+// any, err := ptypes.MarshalAny(foo)
+// ...
+// foo := &pb.Foo{}
+// if err := ptypes.UnmarshalAny(any, foo); err != nil {
+// ...
+// }
+//
+// The pack methods provided by protobuf library will by default use
+// 'type.googleapis.com/full.type.name' as the type URL and the unpack
+// methods only use the fully qualified type name after the last '/'
+// in the type URL, for example "foo.bar.com/x/y.z" will yield type
+// name "y.z".
+//
+//
+// JSON
+// ====
+// The JSON representation of an `Any` value uses the regular
+// representation of the deserialized, embedded message, with an
+// additional field `@type` which contains the type URL. Example:
+//
+// package google.profile;
+// message Person {
+// string first_name = 1;
+// string last_name = 2;
+// }
+//
+// {
+// "@type": "type.googleapis.com/google.profile.Person",
+// "firstName": <string>,
+// "lastName": <string>
+// }
+//
+// If the embedded message type is well-known and has a custom JSON
+// representation, that representation will be embedded adding a field
+// `value` which holds the custom JSON in addition to the `@type`
+// field. Example (for message [google.protobuf.Duration][]):
+//
+// {
+// "@type": "type.googleapis.com/google.protobuf.Duration",
+// "value": "1.212s"
+// }
+//
+message Any {
+ // A URL/resource name that uniquely identifies the type of the serialized
+ // protocol buffer message. This string must contain at least
+ // one "/" character. The last segment of the URL's path must represent
+ // the fully qualified name of the type (as in
+ // `path/google.protobuf.Duration`). The name should be in a canonical form
+ // (e.g., leading "." is not accepted).
+ //
+ // In practice, teams usually precompile into the binary all types that they
+ // expect it to use in the context of Any.
However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/packages/codegen/proto/google/protobuf/descriptor.proto b/packages/codegen/proto/google/protobuf/descriptor.proto new file mode 100644 index 00000000..4a08905a --- /dev/null +++ b/packages/codegen/proto/google/protobuf/descriptor.proto @@ -0,0 +1,885 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). 
+ + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "descriptor"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. 
+ TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. 
+ repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). 
+ optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = false]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. 
+ optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + //reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. 
+ //
+// For maps fields:
+// map<KeyType, ValueType> map_field = 1;
+// The parsed descriptor looks like:
+// message MapFieldEntry {
+// option map_entry = true;
+// optional KeyType key = 1;
+// optional ValueType value = 2;
+// }
+// repeated MapFieldEntry map_field = 1;
+//
+// Implementations may choose not to generate the map_entry=true message, but
+// use a native map in the target language to hold the keys and values.
+// The reflection APIs in such implementations still need to work as
+// if the field is a repeated message field.
+//
+// NOTE: Do not set the option in .proto files. Always use the maps syntax
+// instead. The option should only be implicitly set by the proto compiler
+// parser.
+ optional bool map_entry = 7;
+
+ //reserved 8; // javalite_serializable
+ //reserved 9; // javanano_as_lite
+
+
+ // The parser stores options it doesn't recognize here. See above.
+ repeated UninterpretedOption uninterpreted_option = 999;
+
+ // Clients can define custom options in extensions of this message. See above.
+ extensions 1000 to max;
+}
+
+message FieldOptions {
+ // The ctype option instructs the C++ code generator to use a different
+ // representation of the field than it normally would. See the specific
+ // options below. This option is not yet implemented in the open source
+ // release -- sorry, we'll try to include it in a future version!
+ optional CType ctype = 1 [default = STRING];
+ enum CType {
+ // Default mode.
+ STRING = 0;
+
+ CORD = 1;
+
+ STRING_PIECE = 2;
+ }
+ // The packed option can be enabled for repeated primitive fields to enable
+ // a more efficient representation on the wire. Rather than repeatedly
+ // writing the tag and type for each element, the entire array is encoded as
+ // a single length-delimited blob. In proto3, only explicit setting it to
+ // false will avoid using packed encoding.
+ optional bool packed = 2;
+
+ // The jstype option determines the JavaScript type used for values of the
+ // field. The option is permitted only for 64 bit integral and fixed types
+ // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
+ // is represented as JavaScript string, which avoids loss of precision that
+ // can happen when a large value is converted to a floating point JavaScript.
+ // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+ // use the JavaScript "number" type. The behavior of the default option
+ // JS_NORMAL is implementation dependent.
+ //
+ // This option is an enum to permit additional types to be added, e.g.
+ // goog.math.Integer.
+ optional JSType jstype = 6 [default = JS_NORMAL];
+ enum JSType {
+ // Use the default type.
+ JS_NORMAL = 0;
+
+ // Use JavaScript strings.
+ JS_STRING = 1;
+
+ // Use JavaScript numbers.
+ JS_NUMBER = 2;
+ }
+
+ // Should this field be parsed lazily? Lazy applies only to message-type
+ // fields. It means that when the outer message is initially parsed, the
+ // inner message's contents will not be parsed but instead stored in encoded
+ // form. The inner message will actually be parsed when it is first accessed.
+ //
+ // This is only a hint. Implementations are free to choose whether to use
+ // eager or lazy parsing regardless of the value of this option. However,
+ // setting this option true suggests that the protocol author believes that
+ // using lazy parsing on this field is worth the additional bookkeeping
+ // overhead typically needed to implement it.
+ // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + //reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + //reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. 
We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. 
+message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. 
Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/packages/codegen/proto/google/protobuf/duration.proto b/packages/codegen/proto/google/protobuf/duration.proto new file mode 100644 index 00000000..b14bea5d --- /dev/null +++ b/packages/codegen/proto/google/protobuf/duration.proto @@ -0,0 +1,116 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "types"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. +// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (durations.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. 
+// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +// +message Duration { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. + int32 nanos = 2; +} diff --git a/packages/codegen/proto/google/protobuf/empty.proto b/packages/codegen/proto/google/protobuf/empty.proto new file mode 100644 index 00000000..6057c852 --- /dev/null +++ b/packages/codegen/proto/google/protobuf/empty.proto @@ -0,0 +1,52 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "types"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +// The JSON representation for `Empty` is empty JSON object `{}`. +message Empty {} diff --git a/packages/codegen/proto/google/protobuf/timestamp.proto b/packages/codegen/proto/google/protobuf/timestamp.proto new file mode 100644 index 00000000..0ebe36ea --- /dev/null +++ b/packages/codegen/proto/google/protobuf/timestamp.proto @@ -0,0 +1,138 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "types"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. 
Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). +// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// +// Example 5: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +// ) to obtain a formatter capable of generating timestamps in this format. +// +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. 
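+  //
+  // As a rough client-side sketch (assuming the TypeScript type generated
+  // from this message exposes `seconds` as a Long and `nanos` as a number),
+  // a Timestamp can be converted to a JavaScript Date with:
+  //
+  //   const date = new Date(ts.seconds.toNumber() * 1000 + ts.nanos / 1e6);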
+ int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + int32 nanos = 2; +} diff --git a/packages/codegen/proto/ibc/LICENSE b/packages/codegen/proto/ibc/LICENSE new file mode 100644 index 00000000..c04a16b3 --- /dev/null +++ b/packages/codegen/proto/ibc/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 COSMOS + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/packages/codegen/proto/ibc/README.md b/packages/codegen/proto/ibc/README.md new file mode 100644 index 00000000..e4ee70c7 --- /dev/null +++ b/packages/codegen/proto/ibc/README.md @@ -0,0 +1 @@ +# ibc \ No newline at end of file diff --git a/packages/codegen/proto/ibc/applications/transfer/v1/genesis.proto b/packages/codegen/proto/ibc/applications/transfer/v1/genesis.proto new file mode 100644 index 00000000..73d9fddd --- /dev/null +++ b/packages/codegen/proto/ibc/applications/transfer/v1/genesis.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package ibc.applications.transfer.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/apps/transfer/types"; + +import "ibc/applications/transfer/v1/transfer.proto"; +import "gogoproto/gogo.proto"; + +// GenesisState defines the ibc-transfer genesis state +message GenesisState { + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + repeated DenomTrace denom_traces = 2 [ + (gogoproto.castrepeated) = "Traces", + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"denom_traces\"" + ]; + Params params = 3 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/ibc/applications/transfer/v1/query.proto b/packages/codegen/proto/ibc/applications/transfer/v1/query.proto new file mode 100644 index 00000000..f2faa87b --- /dev/null +++ b/packages/codegen/proto/ibc/applications/transfer/v1/query.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; + +package ibc.applications.transfer.v1; + +import "gogoproto/gogo.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "ibc/applications/transfer/v1/transfer.proto"; +import "google/api/annotations.proto"; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/apps/transfer/types"; + +// Query provides defines the gRPC querier service. +service Query { + // DenomTrace queries a denomination trace information. 
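+  //
+  // As a usage sketch against the TypeScript output generated from this file
+  // (assuming Telescope emits a QueryClientImpl for this service):
+  //
+  //   const client = new QueryClientImpl(rpc);
+  //   const { denomTrace } = await client.DenomTrace({ hash: "<hex-encoded hash>" });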
+ rpc DenomTrace(QueryDenomTraceRequest) returns (QueryDenomTraceResponse) { + option (google.api.http).get = "/ibc/apps/transfer/v1/denom_traces/{hash}"; + } + + // DenomTraces queries all denomination traces. + rpc DenomTraces(QueryDenomTracesRequest) returns (QueryDenomTracesResponse) { + option (google.api.http).get = "/ibc/apps/transfer/v1/denom_traces"; + } + + // Params queries all parameters of the ibc-transfer module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/ibc/apps/transfer/v1/params"; + } +} + +// QueryDenomTraceRequest is the request type for the Query/DenomTrace RPC +// method +message QueryDenomTraceRequest { + // hash (in hex format) of the denomination trace information. + string hash = 1; +} + +// QueryDenomTraceResponse is the response type for the Query/DenomTrace RPC +// method. +message QueryDenomTraceResponse { + // denom_trace returns the requested denomination trace information. + DenomTrace denom_trace = 1; +} + +// QueryConnectionsRequest is the request type for the Query/DenomTraces RPC +// method +message QueryDenomTracesRequest { + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryConnectionsResponse is the response type for the Query/DenomTraces RPC +// method. +message QueryDenomTracesResponse { + // denom_traces returns all denominations trace information. + repeated DenomTrace denom_traces = 1 [(gogoproto.castrepeated) = "Traces", (gogoproto.nullable) = false]; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1; +} diff --git a/packages/codegen/proto/ibc/applications/transfer/v1/transfer.proto b/packages/codegen/proto/ibc/applications/transfer/v1/transfer.proto new file mode 100644 index 00000000..10ce92f9 --- /dev/null +++ b/packages/codegen/proto/ibc/applications/transfer/v1/transfer.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package ibc.applications.transfer.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/apps/transfer/types"; + +import "gogoproto/gogo.proto"; + +// DenomTrace contains the base denomination for ICS20 fungible tokens and the +// source tracing information path. +message DenomTrace { + // path defines the chain of port/channel identifiers used for tracing the + // source of the fungible token. + string path = 1; + // base denomination of the relayed fungible token. + string base_denom = 2; +} + +// Params defines the set of IBC transfer parameters. +// NOTE: To prevent a single token from being transferred, set the +// TransfersEnabled parameter to true and then set the bank module's SendEnabled +// parameter for the denomination to false. +message Params { + // send_enabled enables or disables all cross-chain token transfers from this + // chain. + bool send_enabled = 1 [(gogoproto.moretags) = "yaml:\"send_enabled\""]; + // receive_enabled enables or disables all cross-chain token transfers to this + // chain. 
+ bool receive_enabled = 2 [(gogoproto.moretags) = "yaml:\"receive_enabled\""]; +} diff --git a/packages/codegen/proto/ibc/applications/transfer/v1/tx.proto b/packages/codegen/proto/ibc/applications/transfer/v1/tx.proto new file mode 100644 index 00000000..dfc480d0 --- /dev/null +++ b/packages/codegen/proto/ibc/applications/transfer/v1/tx.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; + +package ibc.applications.transfer.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/apps/transfer/types"; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "ibc/core/client/v1/client.proto"; + +// Msg defines the ibc/transfer Msg service. +service Msg { + // Transfer defines a rpc handler method for MsgTransfer. + rpc Transfer(MsgTransfer) returns (MsgTransferResponse); +} + +// MsgTransfer defines a msg to transfer fungible tokens (i.e Coins) between +// ICS20 enabled chains. See ICS Spec here: +// https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures +message MsgTransfer { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // the port on which the packet will be sent + string source_port = 1 [(gogoproto.moretags) = "yaml:\"source_port\""]; + // the channel by which the packet will be sent + string source_channel = 2 [(gogoproto.moretags) = "yaml:\"source_channel\""]; + // the tokens to be transferred + cosmos.base.v1beta1.Coin token = 3 [(gogoproto.nullable) = false]; + // the sender address + string sender = 4; + // the recipient address on the destination chain + string receiver = 5; + // Timeout height relative to the current block height. + // The timeout is disabled when set to 0. + ibc.core.client.v1.Height timeout_height = 6 + [(gogoproto.moretags) = "yaml:\"timeout_height\"", (gogoproto.nullable) = false]; + // Timeout timestamp (in nanoseconds) relative to the current block timestamp. + // The timeout is disabled when set to 0. + uint64 timeout_timestamp = 7 [(gogoproto.moretags) = "yaml:\"timeout_timestamp\""]; +} + +// MsgTransferResponse defines the Msg/Transfer response type. 
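+// A successful Transfer simply returns this empty message. As a rough
+// TypeScript sketch (assuming the generated MsgTransfer type and a
+// hypothetical signAndBroadcast helper), a transfer could be submitted as:
+//
+//   const msg = MsgTransfer.fromPartial({
+//     sourcePort: "transfer",
+//     sourceChannel: "channel-0",
+//     token: { denom: "uatom", amount: "1000" },
+//     sender,
+//     receiver,
+//     timeoutTimestamp: timeoutInNanos,
+//   });
+//   await signAndBroadcast(sender, [
+//     { typeUrl: "/ibc.applications.transfer.v1.MsgTransfer", value: msg },
+//   ]);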
+message MsgTransferResponse {} diff --git a/packages/codegen/proto/ibc/applications/transfer/v2/packet.proto b/packages/codegen/proto/ibc/applications/transfer/v2/packet.proto new file mode 100644 index 00000000..593392a9 --- /dev/null +++ b/packages/codegen/proto/ibc/applications/transfer/v2/packet.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package ibc.applications.transfer.v2; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/apps/transfer/types"; + +// FungibleTokenPacketData defines a struct for the packet payload +// See FungibleTokenPacketData spec: +// https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures +message FungibleTokenPacketData { + // the token denomination to be transferred + string denom = 1; + // the token amount to be transferred + string amount = 2; + // the sender address + string sender = 3; + // the recipient address on the destination chain + string receiver = 4; +} diff --git a/packages/codegen/proto/ibc/core/channel/v1/channel.proto b/packages/codegen/proto/ibc/core/channel/v1/channel.proto new file mode 100644 index 00000000..c7f42dbf --- /dev/null +++ b/packages/codegen/proto/ibc/core/channel/v1/channel.proto @@ -0,0 +1,148 @@ +syntax = "proto3"; + +package ibc.core.channel.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/04-channel/types"; + +import "gogoproto/gogo.proto"; +import "ibc/core/client/v1/client.proto"; + +// Channel defines pipeline for exactly-once packet delivery between specific +// modules on separate blockchains, which has at least one end capable of +// sending packets and one end capable of receiving packets. +message Channel { + option (gogoproto.goproto_getters) = false; + + // current state of the channel end + State state = 1; + // whether the channel is ordered or unordered + Order ordering = 2; + // counterparty channel end + Counterparty counterparty = 3 [(gogoproto.nullable) = false]; + // list of connection identifiers, in order, along which packets sent on + // this channel will travel + repeated string connection_hops = 4 [(gogoproto.moretags) = "yaml:\"connection_hops\""]; + // opaque channel version, which is agreed upon during the handshake + string version = 5; +} + +// IdentifiedChannel defines a channel with additional port and channel +// identifier fields. +message IdentifiedChannel { + option (gogoproto.goproto_getters) = false; + + // current state of the channel end + State state = 1; + // whether the channel is ordered or unordered + Order ordering = 2; + // counterparty channel end + Counterparty counterparty = 3 [(gogoproto.nullable) = false]; + // list of connection identifiers, in order, along which packets sent on + // this channel will travel + repeated string connection_hops = 4 [(gogoproto.moretags) = "yaml:\"connection_hops\""]; + // opaque channel version, which is agreed upon during the handshake + string version = 5; + // port identifier + string port_id = 6; + // channel identifier + string channel_id = 7; +} + +// State defines if a channel is in one of the following states: +// CLOSED, INIT, TRYOPEN, OPEN or UNINITIALIZED. +enum State { + option (gogoproto.goproto_enum_prefix) = false; + + // Default State + STATE_UNINITIALIZED_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "UNINITIALIZED"]; + // A channel has just started the opening handshake. + STATE_INIT = 1 [(gogoproto.enumvalue_customname) = "INIT"]; + // A channel has acknowledged the handshake step on the counterparty chain. 
+ STATE_TRYOPEN = 2 [(gogoproto.enumvalue_customname) = "TRYOPEN"]; + // A channel has completed the handshake. Open channels are + // ready to send and receive packets. + STATE_OPEN = 3 [(gogoproto.enumvalue_customname) = "OPEN"]; + // A channel has been closed and can no longer be used to send or receive + // packets. + STATE_CLOSED = 4 [(gogoproto.enumvalue_customname) = "CLOSED"]; +} + +// Order defines if a channel is ORDERED or UNORDERED +enum Order { + option (gogoproto.goproto_enum_prefix) = false; + + // zero-value for channel ordering + ORDER_NONE_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "NONE"]; + // packets can be delivered in any order, which may differ from the order in + // which they were sent. + ORDER_UNORDERED = 1 [(gogoproto.enumvalue_customname) = "UNORDERED"]; + // packets are delivered exactly in the order which they were sent + ORDER_ORDERED = 2 [(gogoproto.enumvalue_customname) = "ORDERED"]; +} + +// Counterparty defines a channel end counterparty +message Counterparty { + option (gogoproto.goproto_getters) = false; + + // port on the counterparty chain which owns the other end of the channel. + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + // channel end on the counterparty chain + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; +} + +// Packet defines a type that carries data across different chains through IBC +message Packet { + option (gogoproto.goproto_getters) = false; + + // number corresponds to the order of sends and receives, where a Packet + // with an earlier sequence number must be sent and received before a Packet + // with a later sequence number. + uint64 sequence = 1; + // identifies the port on the sending chain. + string source_port = 2 [(gogoproto.moretags) = "yaml:\"source_port\""]; + // identifies the channel end on the sending chain. + string source_channel = 3 [(gogoproto.moretags) = "yaml:\"source_channel\""]; + // identifies the port on the receiving chain. + string destination_port = 4 [(gogoproto.moretags) = "yaml:\"destination_port\""]; + // identifies the channel end on the receiving chain. + string destination_channel = 5 [(gogoproto.moretags) = "yaml:\"destination_channel\""]; + // actual opaque bytes transferred directly to the application module + bytes data = 6; + // block height after which the packet times out + ibc.core.client.v1.Height timeout_height = 7 + [(gogoproto.moretags) = "yaml:\"timeout_height\"", (gogoproto.nullable) = false]; + // block timestamp (in nanoseconds) after which the packet times out + uint64 timeout_timestamp = 8 [(gogoproto.moretags) = "yaml:\"timeout_timestamp\""]; +} + +// PacketState defines the generic type necessary to retrieve and store +// packet commitments, acknowledgements, and receipts. +// Caller is responsible for knowing the context necessary to interpret this +// state as a commitment, acknowledgement, or a receipt. +message PacketState { + option (gogoproto.goproto_getters) = false; + + // channel port identifier. + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + // channel unique identifier. + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + // packet sequence. + uint64 sequence = 3; + // embedded data that represents packet state. + bytes data = 4; +} + +// Acknowledgement is the recommended acknowledgement format to be used by +// app-specific protocols. 
+// NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental +// conflicts with other protobuf message formats used for acknowledgements. +// The first byte of any message with this format will be the non-ASCII values +// `0xaa` (result) or `0xb2` (error). Implemented as defined by ICS: +// https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope +message Acknowledgement { + // response contains either a result or an error and must be non-empty + oneof response { + bytes result = 21; + string error = 22; + } +} diff --git a/packages/codegen/proto/ibc/core/channel/v1/genesis.proto b/packages/codegen/proto/ibc/core/channel/v1/genesis.proto new file mode 100644 index 00000000..38b57ed6 --- /dev/null +++ b/packages/codegen/proto/ibc/core/channel/v1/genesis.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; + +package ibc.core.channel.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/04-channel/types"; + +import "gogoproto/gogo.proto"; +import "ibc/core/channel/v1/channel.proto"; + +// GenesisState defines the ibc channel submodule's genesis state. +message GenesisState { + repeated IdentifiedChannel channels = 1 [(gogoproto.casttype) = "IdentifiedChannel", (gogoproto.nullable) = false]; + repeated PacketState acknowledgements = 2 [(gogoproto.nullable) = false]; + repeated PacketState commitments = 3 [(gogoproto.nullable) = false]; + repeated PacketState receipts = 4 [(gogoproto.nullable) = false]; + repeated PacketSequence send_sequences = 5 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"send_sequences\""]; + repeated PacketSequence recv_sequences = 6 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"recv_sequences\""]; + repeated PacketSequence ack_sequences = 7 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"ack_sequences\""]; + // the sequence for the next generated channel identifier + uint64 next_channel_sequence = 8 [(gogoproto.moretags) = "yaml:\"next_channel_sequence\""]; +} + +// PacketSequence defines the genesis type necessary to retrieve and store +// next send and receive sequences. +message PacketSequence { + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + uint64 sequence = 3; +} diff --git a/packages/codegen/proto/ibc/core/channel/v1/query.proto b/packages/codegen/proto/ibc/core/channel/v1/query.proto new file mode 100644 index 00000000..212cb645 --- /dev/null +++ b/packages/codegen/proto/ibc/core/channel/v1/query.proto @@ -0,0 +1,376 @@ +syntax = "proto3"; + +package ibc.core.channel.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/04-channel/types"; + +import "ibc/core/client/v1/client.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "ibc/core/channel/v1/channel.proto"; +import "google/api/annotations.proto"; +import "google/protobuf/any.proto"; +import "gogoproto/gogo.proto"; + +// Query provides defines the gRPC querier service +service Query { + // Channel queries an IBC Channel. + rpc Channel(QueryChannelRequest) returns (QueryChannelResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/ports/{port_id}"; + } + + // Channels queries all the IBC channels of a chain. 
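+  //
+  // A rough sketch (assuming the generated TypeScript QueryClientImpl and
+  // PageRequest helper): page through the full channel list by threading the
+  // response's pagination.nextKey back into the next request:
+  //
+  //   const res = await client.Channels({
+  //     pagination: PageRequest.fromPartial({ limit: Long.fromNumber(100) }),
+  //   });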
+ rpc Channels(QueryChannelsRequest) returns (QueryChannelsResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels"; + } + + // ConnectionChannels queries all the channels associated with a connection + // end. + rpc ConnectionChannels(QueryConnectionChannelsRequest) returns (QueryConnectionChannelsResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/connections/{connection}/channels"; + } + + // ChannelClientState queries for the client state for the channel associated + // with the provided channel identifiers. + rpc ChannelClientState(QueryChannelClientStateRequest) returns (QueryChannelClientStateResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/client_state"; + } + + // ChannelConsensusState queries for the consensus state for the channel + // associated with the provided channel identifiers. + rpc ChannelConsensusState(QueryChannelConsensusStateRequest) returns (QueryChannelConsensusStateResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/consensus_state/revision/" + "{revision_number}/height/{revision_height}"; + } + + // PacketCommitment queries a stored packet commitment hash. + rpc PacketCommitment(QueryPacketCommitmentRequest) returns (QueryPacketCommitmentResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/ports/{port_id}/" + "packet_commitments/{sequence}"; + } + + // PacketCommitments returns all the packet commitments hashes associated + // with a channel. + rpc PacketCommitments(QueryPacketCommitmentsRequest) returns (QueryPacketCommitmentsResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/packet_commitments"; + } + + // PacketReceipt queries if a given packet sequence has been received on the + // queried chain + rpc PacketReceipt(QueryPacketReceiptRequest) returns (QueryPacketReceiptResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/packet_receipts/{sequence}"; + } + + // PacketAcknowledgement queries a stored packet acknowledgement hash. + rpc PacketAcknowledgement(QueryPacketAcknowledgementRequest) returns (QueryPacketAcknowledgementResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/packet_acks/{sequence}"; + } + + // PacketAcknowledgements returns all the packet acknowledgements associated + // with a channel. + rpc PacketAcknowledgements(QueryPacketAcknowledgementsRequest) returns (QueryPacketAcknowledgementsResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/packet_acknowledgements"; + } + + // UnreceivedPackets returns all the unreceived IBC packets associated with a + // channel and sequences. + rpc UnreceivedPackets(QueryUnreceivedPacketsRequest) returns (QueryUnreceivedPacketsResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/ports/{port_id}/" + "packet_commitments/" + "{packet_commitment_sequences}/unreceived_packets"; + } + + // UnreceivedAcks returns all the unreceived IBC acknowledgements associated + // with a channel and sequences. 
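+  //
+  // A relayer-style sketch (TypeScript names assumed from the generated
+  // client), checking which acknowledgements still need to be relayed back
+  // to this chain:
+  //
+  //   const { sequences } = await client.UnreceivedAcks({
+  //     portId: "transfer",
+  //     channelId: "channel-0",
+  //     packetAckSequences: ackedSequences,
+  //   });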
+ rpc UnreceivedAcks(QueryUnreceivedAcksRequest) returns (QueryUnreceivedAcksResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/packet_commitments/" + "{packet_ack_sequences}/unreceived_acks"; + } + + // NextSequenceReceive returns the next receive sequence for a given channel. + rpc NextSequenceReceive(QueryNextSequenceReceiveRequest) returns (QueryNextSequenceReceiveResponse) { + option (google.api.http).get = "/ibc/core/channel/v1/channels/{channel_id}/" + "ports/{port_id}/next_sequence"; + } +} + +// QueryChannelRequest is the request type for the Query/Channel RPC method +message QueryChannelRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; +} + +// QueryChannelResponse is the response type for the Query/Channel RPC method. +// Besides the Channel end, it includes a proof and the height from which the +// proof was retrieved. +message QueryChannelResponse { + // channel associated with the request identifiers + ibc.core.channel.v1.Channel channel = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryChannelsRequest is the request type for the Query/Channels RPC method +message QueryChannelsRequest { + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryChannelsResponse is the response type for the Query/Channels RPC method. +message QueryChannelsResponse { + // list of stored channels of the chain. + repeated ibc.core.channel.v1.IdentifiedChannel channels = 1; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; + // query block height + ibc.core.client.v1.Height height = 3 [(gogoproto.nullable) = false]; +} + +// QueryConnectionChannelsRequest is the request type for the +// Query/QueryConnectionChannels RPC method +message QueryConnectionChannelsRequest { + // connection unique identifier + string connection = 1; + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryConnectionChannelsResponse is the Response type for the +// Query/QueryConnectionChannels RPC method +message QueryConnectionChannelsResponse { + // list of channels associated with a connection. 
+ repeated ibc.core.channel.v1.IdentifiedChannel channels = 1; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; + // query block height + ibc.core.client.v1.Height height = 3 [(gogoproto.nullable) = false]; +} + +// QueryChannelClientStateRequest is the request type for the Query/ClientState +// RPC method +message QueryChannelClientStateRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; +} + +// QueryChannelClientStateResponse is the Response type for the +// Query/QueryChannelClientState RPC method +message QueryChannelClientStateResponse { + // client state associated with the channel + ibc.core.client.v1.IdentifiedClientState identified_client_state = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryChannelConsensusStateRequest is the request type for the +// Query/ConsensusState RPC method +message QueryChannelConsensusStateRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // revision number of the consensus state + uint64 revision_number = 3; + // revision height of the consensus state + uint64 revision_height = 4; +} + +// QueryChannelClientStateResponse is the Response type for the +// Query/QueryChannelClientState RPC method +message QueryChannelConsensusStateResponse { + // consensus state associated with the channel + google.protobuf.Any consensus_state = 1; + // client ID associated with the consensus state + string client_id = 2; + // merkle proof of existence + bytes proof = 3; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 4 [(gogoproto.nullable) = false]; +} + +// QueryPacketCommitmentRequest is the request type for the +// Query/PacketCommitment RPC method +message QueryPacketCommitmentRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // packet sequence + uint64 sequence = 3; +} + +// QueryPacketCommitmentResponse defines the client query response for a packet +// which also includes a proof and the height from which the proof was +// retrieved +message QueryPacketCommitmentResponse { + // packet associated with the request fields + bytes commitment = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryPacketCommitmentsRequest is the request type for the +// Query/QueryPacketCommitments RPC method +message QueryPacketCommitmentsRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 3; +} + +// QueryPacketCommitmentsResponse is the request type for the +// Query/QueryPacketCommitments RPC method +message QueryPacketCommitmentsResponse { + repeated ibc.core.channel.v1.PacketState commitments = 1; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; + // query block height + ibc.core.client.v1.Height height = 3 [(gogoproto.nullable) = false]; +} + +// QueryPacketReceiptRequest is the request type for the +// Query/PacketReceipt RPC method +message QueryPacketReceiptRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string 
channel_id = 2; + // packet sequence + uint64 sequence = 3; +} + +// QueryPacketReceiptResponse defines the client query response for a packet +// receipt which also includes a proof, and the height from which the proof was +// retrieved +message QueryPacketReceiptResponse { + // success flag for if receipt exists + bool received = 2; + // merkle proof of existence + bytes proof = 3; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 4 [(gogoproto.nullable) = false]; +} + +// QueryPacketAcknowledgementRequest is the request type for the +// Query/PacketAcknowledgement RPC method +message QueryPacketAcknowledgementRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // packet sequence + uint64 sequence = 3; +} + +// QueryPacketAcknowledgementResponse defines the client query response for a +// packet which also includes a proof and the height from which the +// proof was retrieved +message QueryPacketAcknowledgementResponse { + // packet associated with the request fields + bytes acknowledgement = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryPacketAcknowledgementsRequest is the request type for the +// Query/QueryPacketCommitments RPC method +message QueryPacketAcknowledgementsRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 3; + // list of packet sequences + repeated uint64 packet_commitment_sequences = 4; +} + +// QueryPacketAcknowledgemetsResponse is the request type for the +// Query/QueryPacketAcknowledgements RPC method +message QueryPacketAcknowledgementsResponse { + repeated ibc.core.channel.v1.PacketState acknowledgements = 1; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; + // query block height + ibc.core.client.v1.Height height = 3 [(gogoproto.nullable) = false]; +} + +// QueryUnreceivedPacketsRequest is the request type for the +// Query/UnreceivedPackets RPC method +message QueryUnreceivedPacketsRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // list of packet sequences + repeated uint64 packet_commitment_sequences = 3; +} + +// QueryUnreceivedPacketsResponse is the response type for the +// Query/UnreceivedPacketCommitments RPC method +message QueryUnreceivedPacketsResponse { + // list of unreceived packet sequences + repeated uint64 sequences = 1; + // query block height + ibc.core.client.v1.Height height = 2 [(gogoproto.nullable) = false]; +} + +// QueryUnreceivedAcks is the request type for the +// Query/UnreceivedAcks RPC method +message QueryUnreceivedAcksRequest { + // port unique identifier + string port_id = 1; + // channel unique identifier + string channel_id = 2; + // list of acknowledgement sequences + repeated uint64 packet_ack_sequences = 3; +} + +// QueryUnreceivedAcksResponse is the response type for the +// Query/UnreceivedAcks RPC method +message QueryUnreceivedAcksResponse { + // list of unreceived acknowledgement sequences + repeated uint64 sequences = 1; + // query block height + ibc.core.client.v1.Height height = 2 [(gogoproto.nullable) = false]; +} + +// QueryNextSequenceReceiveRequest is the request type for the +// Query/QueryNextSequenceReceiveRequest RPC method 
+message QueryNextSequenceReceiveRequest {
+  // port unique identifier
+  string port_id = 1;
+  // channel unique identifier
+  string channel_id = 2;
+}
+
+// QueryNextSequenceReceiveResponse is the response type for the
+// Query/QueryNextSequenceReceive RPC method
+message QueryNextSequenceReceiveResponse {
+  // next sequence receive number
+  uint64 next_sequence_receive = 1;
+  // merkle proof of existence
+  bytes proof = 2;
+  // height at which the proof was retrieved
+  ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false];
+}
diff --git a/packages/codegen/proto/ibc/core/channel/v1/tx.proto b/packages/codegen/proto/ibc/core/channel/v1/tx.proto
new file mode 100644
index 00000000..dab45080
--- /dev/null
+++ b/packages/codegen/proto/ibc/core/channel/v1/tx.proto
@@ -0,0 +1,211 @@
+syntax = "proto3";
+
+package ibc.core.channel.v1;
+
+option go_package = "github.com/cosmos/ibc-go/v2/modules/core/04-channel/types";
+
+import "gogoproto/gogo.proto";
+import "ibc/core/client/v1/client.proto";
+import "ibc/core/channel/v1/channel.proto";
+
+// Msg defines the ibc/channel Msg service.
+service Msg {
+  // ChannelOpenInit defines a rpc handler method for MsgChannelOpenInit.
+  rpc ChannelOpenInit(MsgChannelOpenInit) returns (MsgChannelOpenInitResponse);
+
+  // ChannelOpenTry defines a rpc handler method for MsgChannelOpenTry.
+  rpc ChannelOpenTry(MsgChannelOpenTry) returns (MsgChannelOpenTryResponse);
+
+  // ChannelOpenAck defines a rpc handler method for MsgChannelOpenAck.
+  rpc ChannelOpenAck(MsgChannelOpenAck) returns (MsgChannelOpenAckResponse);
+
+  // ChannelOpenConfirm defines a rpc handler method for MsgChannelOpenConfirm.
+  rpc ChannelOpenConfirm(MsgChannelOpenConfirm) returns (MsgChannelOpenConfirmResponse);
+
+  // ChannelCloseInit defines a rpc handler method for MsgChannelCloseInit.
+  rpc ChannelCloseInit(MsgChannelCloseInit) returns (MsgChannelCloseInitResponse);
+
+  // ChannelCloseConfirm defines a rpc handler method for
+  // MsgChannelCloseConfirm.
+  rpc ChannelCloseConfirm(MsgChannelCloseConfirm) returns (MsgChannelCloseConfirmResponse);
+
+  // RecvPacket defines a rpc handler method for MsgRecvPacket.
+  rpc RecvPacket(MsgRecvPacket) returns (MsgRecvPacketResponse);
+
+  // Timeout defines a rpc handler method for MsgTimeout.
+  rpc Timeout(MsgTimeout) returns (MsgTimeoutResponse);
+
+  // TimeoutOnClose defines a rpc handler method for MsgTimeoutOnClose.
+  rpc TimeoutOnClose(MsgTimeoutOnClose) returns (MsgTimeoutOnCloseResponse);
+
+  // Acknowledgement defines a rpc handler method for MsgAcknowledgement.
+  rpc Acknowledgement(MsgAcknowledgement) returns (MsgAcknowledgementResponse);
+}
+
+// MsgChannelOpenInit defines an sdk.Msg to initialize a channel handshake. It
+// is called by a relayer on Chain A.
+message MsgChannelOpenInit {
+  option (gogoproto.equal) = false;
+  option (gogoproto.goproto_getters) = false;
+
+  string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""];
+  Channel channel = 2 [(gogoproto.nullable) = false];
+  string signer = 3;
+}
+
+// MsgChannelOpenInitResponse defines the Msg/ChannelOpenInit response type.
+message MsgChannelOpenInitResponse {}
+
+// MsgChannelOpenTry defines a msg sent by a Relayer to try to open a channel
+// on Chain B.
+message MsgChannelOpenTry { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + // in the case of crossing hello's, when both chains call OpenInit, we need + // the channel identifier of the previous channel in state INIT + string previous_channel_id = 2 [(gogoproto.moretags) = "yaml:\"previous_channel_id\""]; + Channel channel = 3 [(gogoproto.nullable) = false]; + string counterparty_version = 4 [(gogoproto.moretags) = "yaml:\"counterparty_version\""]; + bytes proof_init = 5 [(gogoproto.moretags) = "yaml:\"proof_init\""]; + ibc.core.client.v1.Height proof_height = 6 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 7; +} + +// MsgChannelOpenTryResponse defines the Msg/ChannelOpenTry response type. +message MsgChannelOpenTryResponse {} + +// MsgChannelOpenAck defines a msg sent by a Relayer to Chain A to acknowledge +// the change of channel state to TRYOPEN on Chain B. +message MsgChannelOpenAck { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + string counterparty_channel_id = 3 [(gogoproto.moretags) = "yaml:\"counterparty_channel_id\""]; + string counterparty_version = 4 [(gogoproto.moretags) = "yaml:\"counterparty_version\""]; + bytes proof_try = 5 [(gogoproto.moretags) = "yaml:\"proof_try\""]; + ibc.core.client.v1.Height proof_height = 6 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 7; +} + +// MsgChannelOpenAckResponse defines the Msg/ChannelOpenAck response type. +message MsgChannelOpenAckResponse {} + +// MsgChannelOpenConfirm defines a msg sent by a Relayer to Chain B to +// acknowledge the change of channel state to OPEN on Chain A. +message MsgChannelOpenConfirm { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + bytes proof_ack = 3 [(gogoproto.moretags) = "yaml:\"proof_ack\""]; + ibc.core.client.v1.Height proof_height = 4 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 5; +} + +// MsgChannelOpenConfirmResponse defines the Msg/ChannelOpenConfirm response +// type. +message MsgChannelOpenConfirmResponse {} + +// MsgChannelCloseInit defines a msg sent by a Relayer to Chain A +// to close a channel with Chain B. +message MsgChannelCloseInit { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + string signer = 3; +} + +// MsgChannelCloseInitResponse defines the Msg/ChannelCloseInit response type. +message MsgChannelCloseInitResponse {} + +// MsgChannelCloseConfirm defines a msg sent by a Relayer to Chain B +// to acknowledge the change of channel state to CLOSED on Chain A. 
+message MsgChannelCloseConfirm { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string port_id = 1 [(gogoproto.moretags) = "yaml:\"port_id\""]; + string channel_id = 2 [(gogoproto.moretags) = "yaml:\"channel_id\""]; + bytes proof_init = 3 [(gogoproto.moretags) = "yaml:\"proof_init\""]; + ibc.core.client.v1.Height proof_height = 4 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 5; +} + +// MsgChannelCloseConfirmResponse defines the Msg/ChannelCloseConfirm response +// type. +message MsgChannelCloseConfirmResponse {} + +// MsgRecvPacket receives incoming IBC packet +message MsgRecvPacket { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Packet packet = 1 [(gogoproto.nullable) = false]; + bytes proof_commitment = 2 [(gogoproto.moretags) = "yaml:\"proof_commitment\""]; + ibc.core.client.v1.Height proof_height = 3 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 4; +} + +// MsgRecvPacketResponse defines the Msg/RecvPacket response type. +message MsgRecvPacketResponse {} + +// MsgTimeout receives timed-out packet +message MsgTimeout { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Packet packet = 1 [(gogoproto.nullable) = false]; + bytes proof_unreceived = 2 [(gogoproto.moretags) = "yaml:\"proof_unreceived\""]; + ibc.core.client.v1.Height proof_height = 3 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + uint64 next_sequence_recv = 4 [(gogoproto.moretags) = "yaml:\"next_sequence_recv\""]; + string signer = 5; +} + +// MsgTimeoutResponse defines the Msg/Timeout response type. +message MsgTimeoutResponse {} + +// MsgTimeoutOnClose timed-out packet upon counterparty channel closure. +message MsgTimeoutOnClose { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Packet packet = 1 [(gogoproto.nullable) = false]; + bytes proof_unreceived = 2 [(gogoproto.moretags) = "yaml:\"proof_unreceived\""]; + bytes proof_close = 3 [(gogoproto.moretags) = "yaml:\"proof_close\""]; + ibc.core.client.v1.Height proof_height = 4 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + uint64 next_sequence_recv = 5 [(gogoproto.moretags) = "yaml:\"next_sequence_recv\""]; + string signer = 6; +} + +// MsgTimeoutOnCloseResponse defines the Msg/TimeoutOnClose response type. +message MsgTimeoutOnCloseResponse {} + +// MsgAcknowledgement receives incoming IBC acknowledgement +message MsgAcknowledgement { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + Packet packet = 1 [(gogoproto.nullable) = false]; + bytes acknowledgement = 2; + bytes proof_acked = 3 [(gogoproto.moretags) = "yaml:\"proof_acked\""]; + ibc.core.client.v1.Height proof_height = 4 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 5; +} + +// MsgAcknowledgementResponse defines the Msg/Acknowledgement response type. 
+message MsgAcknowledgementResponse {} diff --git a/packages/codegen/proto/ibc/core/client/v1/client.proto b/packages/codegen/proto/ibc/core/client/v1/client.proto new file mode 100644 index 00000000..f0a1538e --- /dev/null +++ b/packages/codegen/proto/ibc/core/client/v1/client.proto @@ -0,0 +1,104 @@ +syntax = "proto3"; + +package ibc.core.client.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/02-client/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "cosmos/upgrade/v1beta1/upgrade.proto"; +import "cosmos_proto/cosmos.proto"; + +// IdentifiedClientState defines a client state with an additional client +// identifier field. +message IdentifiedClientState { + // client identifier + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // client state + google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; +} + +// ConsensusStateWithHeight defines a consensus state with an additional height +// field. +message ConsensusStateWithHeight { + // consensus state height + Height height = 1 [(gogoproto.nullable) = false]; + // consensus state + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml\"consensus_state\""]; +} + +// ClientConsensusStates defines all the stored consensus states for a given +// client. +message ClientConsensusStates { + // client identifier + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // consensus states and their heights associated with the client + repeated ConsensusStateWithHeight consensus_states = 2 + [(gogoproto.moretags) = "yaml:\"consensus_states\"", (gogoproto.nullable) = false]; +} + +// ClientUpdateProposal is a governance proposal. If it passes, the substitute +// client's latest consensus state is copied over to the subject client. The proposal +// handler may fail if the subject and the substitute do not match in client and +// chain parameters (with exception to latest height, frozen height, and chain-id). +message ClientUpdateProposal { + option (gogoproto.goproto_getters) = false; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + // the title of the update proposal + string title = 1; + // the description of the proposal + string description = 2; + // the client identifier for the client to be updated if the proposal passes + string subject_client_id = 3 [(gogoproto.moretags) = "yaml:\"subject_client_id\""]; + // the substitute client identifier for the client standing in for the subject + // client + string substitute_client_id = 4 [(gogoproto.moretags) = "yaml:\"substitute_client_id\""]; +} + +// UpgradeProposal is a gov Content type for initiating an IBC breaking +// upgrade. +message UpgradeProposal { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + option (gogoproto.equal) = true; + option (cosmos_proto.implements_interface) = "cosmos.gov.v1beta1.Content"; + + string title = 1; + string description = 2; + cosmos.upgrade.v1beta1.Plan plan = 3 [(gogoproto.nullable) = false]; + + // An UpgradedClientState must be provided to perform an IBC breaking upgrade. + // This will make the chain commit to the correct upgraded (self) client state + // before the upgrade occurs, so that connecting chains can verify that the + // new upgraded client is valid by verifying a proof on the previous version + // of the chain. 
This will allow IBC connections to persist smoothly across + // planned chain upgrades + google.protobuf.Any upgraded_client_state = 4 [(gogoproto.moretags) = "yaml:\"upgraded_client_state\""]; +} + +// Height is a monotonically increasing data type +// that can be compared against another Height for the purposes of updating and +// freezing clients +// +// Normally the RevisionHeight is incremented at each height while keeping +// RevisionNumber the same. However some consensus algorithms may choose to +// reset the height in certain conditions e.g. hard forks, state-machine +// breaking changes In these cases, the RevisionNumber is incremented so that +// height continues to be monitonically increasing even as the RevisionHeight +// gets reset +message Height { + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; + + // the revision that the client is currently on + uint64 revision_number = 1 [(gogoproto.moretags) = "yaml:\"revision_number\""]; + // the height within the given revision + uint64 revision_height = 2 [(gogoproto.moretags) = "yaml:\"revision_height\""]; +} + +// Params defines the set of IBC light client parameters. +message Params { + // allowed_clients defines the list of allowed client state types. + repeated string allowed_clients = 1 [(gogoproto.moretags) = "yaml:\"allowed_clients\""]; +} diff --git a/packages/codegen/proto/ibc/core/client/v1/genesis.proto b/packages/codegen/proto/ibc/core/client/v1/genesis.proto new file mode 100644 index 00000000..6668f2ca --- /dev/null +++ b/packages/codegen/proto/ibc/core/client/v1/genesis.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; + +package ibc.core.client.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/02-client/types"; + +import "ibc/core/client/v1/client.proto"; +import "gogoproto/gogo.proto"; + +// GenesisState defines the ibc client submodule's genesis state. +message GenesisState { + // client states with their corresponding identifiers + repeated IdentifiedClientState clients = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "IdentifiedClientStates"]; + // consensus states from each client + repeated ClientConsensusStates clients_consensus = 2 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "ClientsConsensusStates", + (gogoproto.moretags) = "yaml:\"clients_consensus\"" + ]; + // metadata from each client + repeated IdentifiedGenesisMetadata clients_metadata = 3 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"clients_metadata\""]; + Params params = 4 [(gogoproto.nullable) = false]; + // create localhost on initialization + bool create_localhost = 5 [(gogoproto.moretags) = "yaml:\"create_localhost\""]; + // the sequence for the next generated client identifier + uint64 next_client_sequence = 6 [(gogoproto.moretags) = "yaml:\"next_client_sequence\""]; +} + +// GenesisMetadata defines the genesis type for metadata that clients may return +// with ExportMetadata +message GenesisMetadata { + option (gogoproto.goproto_getters) = false; + + // store key of metadata without clientID-prefix + bytes key = 1; + // metadata value + bytes value = 2; +} + +// IdentifiedGenesisMetadata has the client metadata with the corresponding +// client id. 
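Editor's note: the Height comment above describes a lexicographic ordering (compare revision_number first, then revision_height). A minimal TypeScript sketch of that comparison follows; it is hand-written for illustration, assuming camelCase field names rather than the generated types.

// Minimal sketch of the ordering implied by the Height comment above:
// revision_number takes precedence, revision_height breaks ties.
interface Height {
  revisionNumber: bigint;
  revisionHeight: bigint;
}

// Returns a negative number if a < b, 0 if equal, and a positive number if a > b.
function compareHeights(a: Height, b: Height): number {
  if (a.revisionNumber !== b.revisionNumber) {
    return a.revisionNumber < b.revisionNumber ? -1 : 1;
  }
  if (a.revisionHeight === b.revisionHeight) return 0;
  return a.revisionHeight < b.revisionHeight ? -1 : 1;
}

// Example: a later revision compares greater even if its height is lower.
compareHeights(
  { revisionNumber: 2n, revisionHeight: 5n },
  { revisionNumber: 1n, revisionHeight: 10_000n }
); // > 0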
+message IdentifiedGenesisMetadata { + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + repeated GenesisMetadata client_metadata = 2 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"client_metadata\""]; +} diff --git a/packages/codegen/proto/ibc/core/client/v1/query.proto b/packages/codegen/proto/ibc/core/client/v1/query.proto new file mode 100644 index 00000000..b6f8eb47 --- /dev/null +++ b/packages/codegen/proto/ibc/core/client/v1/query.proto @@ -0,0 +1,184 @@ +syntax = "proto3"; + +package ibc.core.client.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/02-client/types"; + +import "cosmos/base/query/v1beta1/pagination.proto"; +import "ibc/core/client/v1/client.proto"; +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; +import "gogoproto/gogo.proto"; + +// Query provides defines the gRPC querier service +service Query { + // ClientState queries an IBC light client. + rpc ClientState(QueryClientStateRequest) returns (QueryClientStateResponse) { + option (google.api.http).get = "/ibc/core/client/v1/client_states/{client_id}"; + } + + // ClientStates queries all the IBC light clients of a chain. + rpc ClientStates(QueryClientStatesRequest) returns (QueryClientStatesResponse) { + option (google.api.http).get = "/ibc/core/client/v1/client_states"; + } + + // ConsensusState queries a consensus state associated with a client state at + // a given height. + rpc ConsensusState(QueryConsensusStateRequest) returns (QueryConsensusStateResponse) { + option (google.api.http).get = "/ibc/core/client/v1/consensus_states/" + "{client_id}/revision/{revision_number}/" + "height/{revision_height}"; + } + + // ConsensusStates queries all the consensus state associated with a given + // client. + rpc ConsensusStates(QueryConsensusStatesRequest) returns (QueryConsensusStatesResponse) { + option (google.api.http).get = "/ibc/core/client/v1/consensus_states/{client_id}"; + } + + // Status queries the status of an IBC client. + rpc ClientStatus(QueryClientStatusRequest) returns (QueryClientStatusResponse) { + option (google.api.http).get = "/ibc/core/client/v1/client_status/{client_id}"; + } + + // ClientParams queries all parameters of the ibc client. + rpc ClientParams(QueryClientParamsRequest) returns (QueryClientParamsResponse) { + option (google.api.http).get = "/ibc/client/v1/params"; + } + + // UpgradedClientState queries an Upgraded IBC light client. + rpc UpgradedClientState(QueryUpgradedClientStateRequest) returns (QueryUpgradedClientStateResponse) { + option (google.api.http).get = "/ibc/core/client/v1/upgraded_client_states"; + } + + // UpgradedConsensusState queries an Upgraded IBC consensus state. + rpc UpgradedConsensusState(QueryUpgradedConsensusStateRequest) returns (QueryUpgradedConsensusStateResponse) { + option (google.api.http).get = "/ibc/core/client/v1/upgraded_consensus_states"; + } +} + +// QueryClientStateRequest is the request type for the Query/ClientState RPC +// method +message QueryClientStateRequest { + // client state unique identifier + string client_id = 1; +} + +// QueryClientStateResponse is the response type for the Query/ClientState RPC +// method. Besides the client state, it includes a proof and the height from +// which the proof was retrieved. 
+message QueryClientStateResponse { + // client state associated with the request identifier + google.protobuf.Any client_state = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryClientStatesRequest is the request type for the Query/ClientStates RPC +// method +message QueryClientStatesRequest { + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryClientStatesResponse is the response type for the Query/ClientStates RPC +// method. +message QueryClientStatesResponse { + // list of stored ClientStates of the chain. + repeated IdentifiedClientState client_states = 1 + [(gogoproto.nullable) = false, (gogoproto.castrepeated) = "IdentifiedClientStates"]; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryConsensusStateRequest is the request type for the Query/ConsensusState +// RPC method. Besides the consensus state, it includes a proof and the height +// from which the proof was retrieved. +message QueryConsensusStateRequest { + // client identifier + string client_id = 1; + // consensus state revision number + uint64 revision_number = 2; + // consensus state revision height + uint64 revision_height = 3; + // latest_height overrrides the height field and queries the latest stored + // ConsensusState + bool latest_height = 4; +} + +// QueryConsensusStateResponse is the response type for the Query/ConsensusState +// RPC method +message QueryConsensusStateResponse { + // consensus state associated with the client identifier at the given height + google.protobuf.Any consensus_state = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryConsensusStatesRequest is the request type for the Query/ConsensusStates +// RPC method. +message QueryConsensusStatesRequest { + // client identifier + string client_id = 1; + // pagination request + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryConsensusStatesResponse is the response type for the +// Query/ConsensusStates RPC method +message QueryConsensusStatesResponse { + // consensus states associated with the identifier + repeated ConsensusStateWithHeight consensus_states = 1 [(gogoproto.nullable) = false]; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} + +// QueryClientStatusRequest is the request type for the Query/ClientStatus RPC +// method +message QueryClientStatusRequest { + // client unique identifier + string client_id = 1; +} + +// QueryClientStatusResponse is the response type for the Query/ClientStatus RPC +// method. It returns the current status of the IBC client. +message QueryClientStatusResponse { + string status = 1; +} + +// QueryClientParamsRequest is the request type for the Query/ClientParams RPC +// method. +message QueryClientParamsRequest {} + +// QueryClientParamsResponse is the response type for the Query/ClientParams RPC +// method. +message QueryClientParamsResponse { + // params defines the parameters of the module. + Params params = 1; +} + +// QueryUpgradedClientStateRequest is the request type for the +// Query/UpgradedClientState RPC method +message QueryUpgradedClientStateRequest {} + +// QueryUpgradedClientStateResponse is the response type for the +// Query/UpgradedClientState RPC method. 
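Editor's note: the ClientState RPC shown above is also exposed over a grpc-gateway REST route (/ibc/core/client/v1/client_states/{client_id}). A minimal sketch follows, assuming an LCD endpoint reachable at `lcdUrl` and a runtime that provides `fetch`; in practice the generated LCD client would wrap this call.

// Minimal sketch: query ClientState over its REST route. The JSON response
// mirrors QueryClientStateResponse above (client_state, proof, proof_height).
async function queryClientState(lcdUrl: string, clientId: string): Promise<unknown> {
  const url = `${lcdUrl}/ibc/core/client/v1/client_states/${encodeURIComponent(clientId)}`;
  const res = await fetch(url);
  if (!res.ok) {
    throw new Error(`client_states query failed with HTTP ${res.status}`);
  }
  return res.json();
}

// Example (hypothetical endpoint):
// queryClientState("https://lcd.example.com", "07-tendermint-0");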
+message QueryUpgradedClientStateResponse { + // client state associated with the request identifier + google.protobuf.Any upgraded_client_state = 1; +} + +// QueryUpgradedConsensusStateRequest is the request type for the +// Query/UpgradedConsensusState RPC method +message QueryUpgradedConsensusStateRequest {} + +// QueryUpgradedConsensusStateResponse is the response type for the +// Query/UpgradedConsensusState RPC method. +message QueryUpgradedConsensusStateResponse { + // Consensus state associated with the request identifier + google.protobuf.Any upgraded_consensus_state = 1; +} diff --git a/packages/codegen/proto/ibc/core/client/v1/tx.proto b/packages/codegen/proto/ibc/core/client/v1/tx.proto new file mode 100644 index 00000000..82df96de --- /dev/null +++ b/packages/codegen/proto/ibc/core/client/v1/tx.proto @@ -0,0 +1,99 @@ +syntax = "proto3"; + +package ibc.core.client.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/02-client/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +// Msg defines the ibc/client Msg service. +service Msg { + // CreateClient defines a rpc handler method for MsgCreateClient. + rpc CreateClient(MsgCreateClient) returns (MsgCreateClientResponse); + + // UpdateClient defines a rpc handler method for MsgUpdateClient. + rpc UpdateClient(MsgUpdateClient) returns (MsgUpdateClientResponse); + + // UpgradeClient defines a rpc handler method for MsgUpgradeClient. + rpc UpgradeClient(MsgUpgradeClient) returns (MsgUpgradeClientResponse); + + // SubmitMisbehaviour defines a rpc handler method for MsgSubmitMisbehaviour. + rpc SubmitMisbehaviour(MsgSubmitMisbehaviour) returns (MsgSubmitMisbehaviourResponse); +} + +// MsgCreateClient defines a message to create an IBC client +message MsgCreateClient { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // light client state + google.protobuf.Any client_state = 1 [(gogoproto.moretags) = "yaml:\"client_state\""]; + // consensus state associated with the client that corresponds to a given + // height. + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + // signer address + string signer = 3; +} + +// MsgCreateClientResponse defines the Msg/CreateClient response type. +message MsgCreateClientResponse {} + +// MsgUpdateClient defines an sdk.Msg to update a IBC client state using +// the given header. +message MsgUpdateClient { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // client unique identifier + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // header to update the light client + google.protobuf.Any header = 2; + // signer address + string signer = 3; +} + +// MsgUpdateClientResponse defines the Msg/UpdateClient response type. 
+message MsgUpdateClientResponse {} + +// MsgUpgradeClient defines an sdk.Msg to upgrade an IBC client to a new client +// state +message MsgUpgradeClient { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // client unique identifier + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // upgraded client state + google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; + // upgraded consensus state, only contains enough information to serve as a + // basis of trust in update logic + google.protobuf.Any consensus_state = 3 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + // proof that old chain committed to new client + bytes proof_upgrade_client = 4 [(gogoproto.moretags) = "yaml:\"proof_upgrade_client\""]; + // proof that old chain committed to new consensus state + bytes proof_upgrade_consensus_state = 5 [(gogoproto.moretags) = "yaml:\"proof_upgrade_consensus_state\""]; + // signer address + string signer = 6; +} + +// MsgUpgradeClientResponse defines the Msg/UpgradeClient response type. +message MsgUpgradeClientResponse {} + +// MsgSubmitMisbehaviour defines an sdk.Msg type that submits Evidence for +// light client misbehaviour. +message MsgSubmitMisbehaviour { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + // client unique identifier + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // misbehaviour used for freezing the light client + google.protobuf.Any misbehaviour = 2; + // signer address + string signer = 3; +} + +// MsgSubmitMisbehaviourResponse defines the Msg/SubmitMisbehaviour response +// type. +message MsgSubmitMisbehaviourResponse {} diff --git a/packages/codegen/proto/ibc/core/commitment/v1/commitment.proto b/packages/codegen/proto/ibc/core/commitment/v1/commitment.proto new file mode 100644 index 00000000..b460b9a1 --- /dev/null +++ b/packages/codegen/proto/ibc/core/commitment/v1/commitment.proto @@ -0,0 +1,41 @@ +syntax = "proto3"; + +package ibc.core.commitment.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/23-commitment/types"; + +import "gogoproto/gogo.proto"; +import "confio/proofs.proto"; + +// MerkleRoot defines a merkle root hash. +// In the Cosmos SDK, the AppHash of a block header becomes the root. +message MerkleRoot { + option (gogoproto.goproto_getters) = false; + + bytes hash = 1; +} + +// MerklePrefix is merkle path prefixed to the key. +// The constructed key from the Path and the key will be append(Path.KeyPath, +// append(Path.KeyPrefix, key...)) +message MerklePrefix { + bytes key_prefix = 1 [(gogoproto.moretags) = "yaml:\"key_prefix\""]; +} + +// MerklePath is the path used to verify commitment proofs, which can be an +// arbitrary structured object (defined by a commitment type). +// MerklePath is represented from root-to-leaf +message MerklePath { + option (gogoproto.goproto_stringer) = false; + + repeated string key_path = 1 [(gogoproto.moretags) = "yaml:\"key_path\""]; +} + +// MerkleProof is a wrapper type over a chain of CommitmentProofs. +// It demonstrates membership or non-membership for an element or set of +// elements, verifiable in conjunction with a known commitment root. Proofs +// should be succinct. 
+// MerkleProofs are ordered from leaf-to-root +message MerkleProof { + repeated ics23.CommitmentProof proofs = 1; +} diff --git a/packages/codegen/proto/ibc/core/connection/v1/connection.proto b/packages/codegen/proto/ibc/core/connection/v1/connection.proto new file mode 100644 index 00000000..74c39e26 --- /dev/null +++ b/packages/codegen/proto/ibc/core/connection/v1/connection.proto @@ -0,0 +1,114 @@ +syntax = "proto3"; + +package ibc.core.connection.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/03-connection/types"; + +import "gogoproto/gogo.proto"; +import "ibc/core/commitment/v1/commitment.proto"; + +// ICS03 - Connection Data Structures as defined in +// https://github.com/cosmos/ibc/blob/master/spec/core/ics-003-connection-semantics#data-structures + +// ConnectionEnd defines a stateful object on a chain connected to another +// separate one. +// NOTE: there must only be 2 defined ConnectionEnds to establish +// a connection between two chains. +message ConnectionEnd { + option (gogoproto.goproto_getters) = false; + // client associated with this connection. + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // IBC version which can be utilised to determine encodings or protocols for + // channels or packets utilising this connection. + repeated Version versions = 2; + // current state of the connection end. + State state = 3; + // counterparty chain associated with this connection. + Counterparty counterparty = 4 [(gogoproto.nullable) = false]; + // delay period that must pass before a consensus state can be used for + // packet-verification NOTE: delay period logic is only implemented by some + // clients. + uint64 delay_period = 5 [(gogoproto.moretags) = "yaml:\"delay_period\""]; +} + +// IdentifiedConnection defines a connection with additional connection +// identifier field. +message IdentifiedConnection { + option (gogoproto.goproto_getters) = false; + // connection identifier. + string id = 1 [(gogoproto.moretags) = "yaml:\"id\""]; + // client associated with this connection. + string client_id = 2 [(gogoproto.moretags) = "yaml:\"client_id\""]; + // IBC version which can be utilised to determine encodings or protocols for + // channels or packets utilising this connection + repeated Version versions = 3; + // current state of the connection end. + State state = 4; + // counterparty chain associated with this connection. + Counterparty counterparty = 5 [(gogoproto.nullable) = false]; + // delay period associated with this connection. + uint64 delay_period = 6 [(gogoproto.moretags) = "yaml:\"delay_period\""]; +} + +// State defines if a connection is in one of the following states: +// INIT, TRYOPEN, OPEN or UNINITIALIZED. +enum State { + option (gogoproto.goproto_enum_prefix) = false; + + // Default State + STATE_UNINITIALIZED_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "UNINITIALIZED"]; + // A connection end has just started the opening handshake. + STATE_INIT = 1 [(gogoproto.enumvalue_customname) = "INIT"]; + // A connection end has acknowledged the handshake step on the counterparty + // chain. + STATE_TRYOPEN = 2 [(gogoproto.enumvalue_customname) = "TRYOPEN"]; + // A connection end has completed the handshake. + STATE_OPEN = 3 [(gogoproto.enumvalue_customname) = "OPEN"]; +} + +// Counterparty defines the counterparty chain associated with a connection end. +message Counterparty { + option (gogoproto.goproto_getters) = false; + + // identifies the client on the counterparty chain associated with a given + // connection. 
+  string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""];
+  // identifies the connection end on the counterparty chain associated with a
+  // given connection.
+  string connection_id = 2 [(gogoproto.moretags) = "yaml:\"connection_id\""];
+  // commitment merkle prefix of the counterparty chain.
+  ibc.core.commitment.v1.MerklePrefix prefix = 3 [(gogoproto.nullable) = false];
+}
+
+// ClientPaths defines all the connection paths for a client state.
+message ClientPaths {
+  // list of connection paths
+  repeated string paths = 1;
+}
+
+// ConnectionPaths defines all the connection paths for a given client state.
+message ConnectionPaths {
+  // client state unique identifier
+  string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""];
+  // list of connection paths
+  repeated string paths = 2;
+}
+
+// Version defines the versioning scheme used to negotiate the IBC version in
+// the connection handshake.
+message Version {
+  option (gogoproto.goproto_getters) = false;
+
+  // unique version identifier
+  string identifier = 1;
+  // list of features compatible with the specified identifier
+  repeated string features = 2;
+}
+
+// Params defines the set of Connection parameters.
+message Params {
+  // maximum expected time per block (in nanoseconds), used to enforce block delay. This parameter should reflect the
+  // largest amount of time that the chain might reasonably take to produce the next block under normal operating
+  // conditions. A safe choice is 3-5x the expected time per block.
+  uint64 max_expected_time_per_block = 1 [(gogoproto.moretags) = "yaml:\"max_expected_time_per_block\""];
+}
diff --git a/packages/codegen/proto/ibc/core/connection/v1/genesis.proto b/packages/codegen/proto/ibc/core/connection/v1/genesis.proto
new file mode 100644
index 00000000..ec5be642
--- /dev/null
+++ b/packages/codegen/proto/ibc/core/connection/v1/genesis.proto
@@ -0,0 +1,18 @@
+syntax = "proto3";
+
+package ibc.core.connection.v1;
+
+option go_package = "github.com/cosmos/ibc-go/v2/modules/core/03-connection/types";
+
+import "gogoproto/gogo.proto";
+import "ibc/core/connection/v1/connection.proto";
+
+// GenesisState defines the ibc connection submodule's genesis state.
+message GenesisState {
+  repeated IdentifiedConnection connections = 1 [(gogoproto.nullable) = false];
+  repeated ConnectionPaths client_connection_paths = 2
+      [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"client_connection_paths\""];
+  // the sequence for the next generated connection identifier
+  uint64 next_connection_sequence = 3 [(gogoproto.moretags) = "yaml:\"next_connection_sequence\""];
+  Params params = 4 [(gogoproto.nullable) = false];
+}
diff --git a/packages/codegen/proto/ibc/core/connection/v1/query.proto b/packages/codegen/proto/ibc/core/connection/v1/query.proto
new file mode 100644
index 00000000..d668c3d2
--- /dev/null
+++ b/packages/codegen/proto/ibc/core/connection/v1/query.proto
@@ -0,0 +1,138 @@
+syntax = "proto3";
+
+package ibc.core.connection.v1;
+
+option go_package = "github.com/cosmos/ibc-go/v2/modules/core/03-connection/types";
+
+import "gogoproto/gogo.proto";
+import "cosmos/base/query/v1beta1/pagination.proto";
+import "ibc/core/client/v1/client.proto";
+import "ibc/core/connection/v1/connection.proto";
+import "google/api/annotations.proto";
+import "google/protobuf/any.proto";
+
+// Query defines the gRPC querier service
+service Query {
+  // Connection queries an IBC connection end.
+ rpc Connection(QueryConnectionRequest) returns (QueryConnectionResponse) { + option (google.api.http).get = "/ibc/core/connection/v1/connections/{connection_id}"; + } + + // Connections queries all the IBC connections of a chain. + rpc Connections(QueryConnectionsRequest) returns (QueryConnectionsResponse) { + option (google.api.http).get = "/ibc/core/connection/v1/connections"; + } + + // ClientConnections queries the connection paths associated with a client + // state. + rpc ClientConnections(QueryClientConnectionsRequest) returns (QueryClientConnectionsResponse) { + option (google.api.http).get = "/ibc/core/connection/v1/client_connections/{client_id}"; + } + + // ConnectionClientState queries the client state associated with the + // connection. + rpc ConnectionClientState(QueryConnectionClientStateRequest) returns (QueryConnectionClientStateResponse) { + option (google.api.http).get = "/ibc/core/connection/v1/connections/{connection_id}/client_state"; + } + + // ConnectionConsensusState queries the consensus state associated with the + // connection. + rpc ConnectionConsensusState(QueryConnectionConsensusStateRequest) returns (QueryConnectionConsensusStateResponse) { + option (google.api.http).get = "/ibc/core/connection/v1/connections/{connection_id}/consensus_state/" + "revision/{revision_number}/height/{revision_height}"; + } +} + +// QueryConnectionRequest is the request type for the Query/Connection RPC +// method +message QueryConnectionRequest { + // connection unique identifier + string connection_id = 1; +} + +// QueryConnectionResponse is the response type for the Query/Connection RPC +// method. Besides the connection end, it includes a proof and the height from +// which the proof was retrieved. +message QueryConnectionResponse { + // connection associated with the request identifier + ibc.core.connection.v1.ConnectionEnd connection = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryConnectionsRequest is the request type for the Query/Connections RPC +// method +message QueryConnectionsRequest { + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// QueryConnectionsResponse is the response type for the Query/Connections RPC +// method. +message QueryConnectionsResponse { + // list of stored connections of the chain. + repeated ibc.core.connection.v1.IdentifiedConnection connections = 1; + // pagination response + cosmos.base.query.v1beta1.PageResponse pagination = 2; + // query block height + ibc.core.client.v1.Height height = 3 [(gogoproto.nullable) = false]; +} + +// QueryClientConnectionsRequest is the request type for the +// Query/ClientConnections RPC method +message QueryClientConnectionsRequest { + // client identifier associated with a connection + string client_id = 1; +} + +// QueryClientConnectionsResponse is the response type for the +// Query/ClientConnections RPC method +message QueryClientConnectionsResponse { + // slice of all the connection paths associated with a client. 
+ repeated string connection_paths = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was generated + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryConnectionClientStateRequest is the request type for the +// Query/ConnectionClientState RPC method +message QueryConnectionClientStateRequest { + // connection identifier + string connection_id = 1 [(gogoproto.moretags) = "yaml:\"connection_id\""]; +} + +// QueryConnectionClientStateResponse is the response type for the +// Query/ConnectionClientState RPC method +message QueryConnectionClientStateResponse { + // client state associated with the channel + ibc.core.client.v1.IdentifiedClientState identified_client_state = 1; + // merkle proof of existence + bytes proof = 2; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 3 [(gogoproto.nullable) = false]; +} + +// QueryConnectionConsensusStateRequest is the request type for the +// Query/ConnectionConsensusState RPC method +message QueryConnectionConsensusStateRequest { + // connection identifier + string connection_id = 1 [(gogoproto.moretags) = "yaml:\"connection_id\""]; + uint64 revision_number = 2; + uint64 revision_height = 3; +} + +// QueryConnectionConsensusStateResponse is the response type for the +// Query/ConnectionConsensusState RPC method +message QueryConnectionConsensusStateResponse { + // consensus state associated with the channel + google.protobuf.Any consensus_state = 1; + // client ID associated with the consensus state + string client_id = 2; + // merkle proof of existence + bytes proof = 3; + // height at which the proof was retrieved + ibc.core.client.v1.Height proof_height = 4 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/ibc/core/connection/v1/tx.proto b/packages/codegen/proto/ibc/core/connection/v1/tx.proto new file mode 100644 index 00000000..9d4e577e --- /dev/null +++ b/packages/codegen/proto/ibc/core/connection/v1/tx.proto @@ -0,0 +1,119 @@ +syntax = "proto3"; + +package ibc.core.connection.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/03-connection/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; +import "ibc/core/client/v1/client.proto"; +import "ibc/core/connection/v1/connection.proto"; + +// Msg defines the ibc/connection Msg service. +service Msg { + // ConnectionOpenInit defines a rpc handler method for MsgConnectionOpenInit. + rpc ConnectionOpenInit(MsgConnectionOpenInit) returns (MsgConnectionOpenInitResponse); + + // ConnectionOpenTry defines a rpc handler method for MsgConnectionOpenTry. + rpc ConnectionOpenTry(MsgConnectionOpenTry) returns (MsgConnectionOpenTryResponse); + + // ConnectionOpenAck defines a rpc handler method for MsgConnectionOpenAck. + rpc ConnectionOpenAck(MsgConnectionOpenAck) returns (MsgConnectionOpenAckResponse); + + // ConnectionOpenConfirm defines a rpc handler method for + // MsgConnectionOpenConfirm. + rpc ConnectionOpenConfirm(MsgConnectionOpenConfirm) returns (MsgConnectionOpenConfirmResponse); +} + +// MsgConnectionOpenInit defines the msg sent by an account on Chain A to +// initialize a connection with Chain B. 
+message MsgConnectionOpenInit {
+  option (gogoproto.equal) = false;
+  option (gogoproto.goproto_getters) = false;
+
+  string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""];
+  Counterparty counterparty = 2 [(gogoproto.nullable) = false];
+  Version version = 3;
+  uint64 delay_period = 4 [(gogoproto.moretags) = "yaml:\"delay_period\""];
+  string signer = 5;
+}
+
+// MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response
+// type.
+message MsgConnectionOpenInitResponse {}
+
+// MsgConnectionOpenTry defines a msg sent by a Relayer to try to open a
+// connection on Chain B.
+message MsgConnectionOpenTry {
+  option (gogoproto.equal) = false;
+  option (gogoproto.goproto_getters) = false;
+
+  string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""];
+  // in the case of crossing hello's, when both chains call OpenInit, we need
+  // the connection identifier of the previous connection in state INIT
+  string previous_connection_id = 2 [(gogoproto.moretags) = "yaml:\"previous_connection_id\""];
+  google.protobuf.Any client_state = 3 [(gogoproto.moretags) = "yaml:\"client_state\""];
+  Counterparty counterparty = 4 [(gogoproto.nullable) = false];
+  uint64 delay_period = 5 [(gogoproto.moretags) = "yaml:\"delay_period\""];
+  repeated Version counterparty_versions = 6 [(gogoproto.moretags) = "yaml:\"counterparty_versions\""];
+  ibc.core.client.v1.Height proof_height = 7
+      [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false];
+  // proof of the initialization of the connection on Chain A: `UNINITIALIZED ->
+  // INIT`
+  bytes proof_init = 8 [(gogoproto.moretags) = "yaml:\"proof_init\""];
+  // proof of client state included in message
+  bytes proof_client = 9 [(gogoproto.moretags) = "yaml:\"proof_client\""];
+  // proof of client consensus state
+  bytes proof_consensus = 10 [(gogoproto.moretags) = "yaml:\"proof_consensus\""];
+  ibc.core.client.v1.Height consensus_height = 11
+      [(gogoproto.moretags) = "yaml:\"consensus_height\"", (gogoproto.nullable) = false];
+  string signer = 12;
+}
+
+// MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type.
+message MsgConnectionOpenTryResponse {}
+
+// MsgConnectionOpenAck defines a msg sent by a Relayer to Chain A to
+// acknowledge the change of connection state to TRYOPEN on Chain B.
+message MsgConnectionOpenAck {
+  option (gogoproto.equal) = false;
+  option (gogoproto.goproto_getters) = false;
+
+  string connection_id = 1 [(gogoproto.moretags) = "yaml:\"connection_id\""];
+  string counterparty_connection_id = 2 [(gogoproto.moretags) = "yaml:\"counterparty_connection_id\""];
+  Version version = 3;
+  google.protobuf.Any client_state = 4 [(gogoproto.moretags) = "yaml:\"client_state\""];
+  ibc.core.client.v1.Height proof_height = 5
+      [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false];
+  // proof of the initialization of the connection on Chain B: `UNINITIALIZED ->
+  // TRYOPEN`
+  bytes proof_try = 6 [(gogoproto.moretags) = "yaml:\"proof_try\""];
+  // proof of client state included in message
+  bytes proof_client = 7 [(gogoproto.moretags) = "yaml:\"proof_client\""];
+  // proof of client consensus state
+  bytes proof_consensus = 8 [(gogoproto.moretags) = "yaml:\"proof_consensus\""];
+  ibc.core.client.v1.Height consensus_height = 9
+      [(gogoproto.moretags) = "yaml:\"consensus_height\"", (gogoproto.nullable) = false];
+  string signer = 10;
+}
+
+// MsgConnectionOpenAckResponse defines the Msg/ConnectionOpenAck response type.
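Editor's note: to make the handshake fields above concrete, here is a hand-written TypeScript sketch of a MsgConnectionOpenInit-shaped value. The camelCase names, the inline Counterparty/Version shapes, and the sample values are illustrative assumptions, not the generated types or required defaults.

// Illustrative only: mirrors MsgConnectionOpenInit, Counterparty and Version
// from the messages above; real code should use the types emitted by codegen.
interface MsgConnectionOpenInitShape {
  clientId: string;
  counterparty: {
    clientId: string;
    connectionId: string;
    prefix: { keyPrefix: Uint8Array };
  };
  version?: { identifier: string; features: string[] };
  delayPeriod: bigint;
  signer: string;
}

const openInit: MsgConnectionOpenInitShape = {
  clientId: "07-tendermint-0",   // client tracking the counterparty chain
  counterparty: {
    clientId: "07-tendermint-1", // counterparty's client for this chain
    connectionId: "",            // unknown until the counterparty runs OpenTry
    prefix: { keyPrefix: new TextEncoder().encode("ibc") }, // commitment store prefix
  },
  version: { identifier: "1", features: ["ORDER_ORDERED", "ORDER_UNORDERED"] },
  delayPeriod: 0n,               // no packet-verification delay
  signer: "<bech32 account address>",
};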
+message MsgConnectionOpenAckResponse {} + +// MsgConnectionOpenConfirm defines a msg sent by a Relayer to Chain B to +// acknowledge the change of connection state to OPEN on Chain A. +message MsgConnectionOpenConfirm { + option (gogoproto.equal) = false; + option (gogoproto.goproto_getters) = false; + + string connection_id = 1 [(gogoproto.moretags) = "yaml:\"connection_id\""]; + // proof for the change of the connection state on Chain A: `INIT -> OPEN` + bytes proof_ack = 2 [(gogoproto.moretags) = "yaml:\"proof_ack\""]; + ibc.core.client.v1.Height proof_height = 3 + [(gogoproto.moretags) = "yaml:\"proof_height\"", (gogoproto.nullable) = false]; + string signer = 4; +} + +// MsgConnectionOpenConfirmResponse defines the Msg/ConnectionOpenConfirm +// response type. +message MsgConnectionOpenConfirmResponse {} diff --git a/packages/codegen/proto/ibc/core/port/v1/query.proto b/packages/codegen/proto/ibc/core/port/v1/query.proto new file mode 100644 index 00000000..3c7fb7cb --- /dev/null +++ b/packages/codegen/proto/ibc/core/port/v1/query.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; + +package ibc.core.port.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/05-port/types"; + +import "ibc/core/channel/v1/channel.proto"; + +// Query defines the gRPC querier service +service Query { + // AppVersion queries an IBC Port and determines the appropriate application version to be used + rpc AppVersion(QueryAppVersionRequest) returns (QueryAppVersionResponse) {} +} + +// QueryAppVersionRequest is the request type for the Query/AppVersion RPC method +message QueryAppVersionRequest { + // port unique identifier + string port_id = 1; + // connection unique identifier + string connection_id = 2; + // whether the channel is ordered or unordered + ibc.core.channel.v1.Order ordering = 3; + // counterparty channel end + ibc.core.channel.v1.Counterparty counterparty = 4; + // proposed version + string proposed_version = 5; +} + +// QueryAppVersionResponse is the response type for the Query/AppVersion RPC method. +message QueryAppVersionResponse { + // port id associated with the request identifiers + string port_id = 1; + // supported app version + string version = 2; +} diff --git a/packages/codegen/proto/ibc/core/types/v1/genesis.proto b/packages/codegen/proto/ibc/core/types/v1/genesis.proto new file mode 100644 index 00000000..e39f6cdb --- /dev/null +++ b/packages/codegen/proto/ibc/core/types/v1/genesis.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package ibc.core.types.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/types"; + +import "gogoproto/gogo.proto"; +import "ibc/core/client/v1/genesis.proto"; +import "ibc/core/connection/v1/genesis.proto"; +import "ibc/core/channel/v1/genesis.proto"; + +// GenesisState defines the ibc module's genesis state. 
+message GenesisState { + // ICS002 - Clients genesis state + ibc.core.client.v1.GenesisState client_genesis = 1 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"client_genesis\""]; + // ICS003 - Connections genesis state + ibc.core.connection.v1.GenesisState connection_genesis = 2 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"connection_genesis\""]; + // ICS004 - Channel genesis state + ibc.core.channel.v1.GenesisState channel_genesis = 3 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"channel_genesis\""]; +} diff --git a/packages/codegen/proto/ibc/lightclients/localhost/v1/localhost.proto b/packages/codegen/proto/ibc/lightclients/localhost/v1/localhost.proto new file mode 100644 index 00000000..4fe05b78 --- /dev/null +++ b/packages/codegen/proto/ibc/lightclients/localhost/v1/localhost.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package ibc.lightclients.localhost.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/light-clients/09-localhost/types"; + +import "gogoproto/gogo.proto"; +import "ibc/core/client/v1/client.proto"; + +// ClientState defines a loopback (localhost) client. It requires (read-only) +// access to keys outside the client prefix. +message ClientState { + option (gogoproto.goproto_getters) = false; + // self chain ID + string chain_id = 1 [(gogoproto.moretags) = "yaml:\"chain_id\""]; + // self latest block height + ibc.core.client.v1.Height height = 2 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/ibc/lightclients/solomachine/v1/solomachine.proto b/packages/codegen/proto/ibc/lightclients/solomachine/v1/solomachine.proto new file mode 100644 index 00000000..b9b8a3a2 --- /dev/null +++ b/packages/codegen/proto/ibc/lightclients/solomachine/v1/solomachine.proto @@ -0,0 +1,189 @@ +syntax = "proto3"; + +package ibc.lightclients.solomachine.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/core/02-client/legacy/v100"; + +import "ibc/core/connection/v1/connection.proto"; +import "ibc/core/channel/v1/channel.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +// ClientState defines a solo machine client that tracks the current consensus +// state and if the client is frozen. +message ClientState { + option (gogoproto.goproto_getters) = false; + // latest sequence of the client state + uint64 sequence = 1; + // frozen sequence of the solo machine + uint64 frozen_sequence = 2 [(gogoproto.moretags) = "yaml:\"frozen_sequence\""]; + ConsensusState consensus_state = 3 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + // when set to true, will allow governance to update a solo machine client. + // The client will be unfrozen if it is frozen. + bool allow_update_after_proposal = 4 [(gogoproto.moretags) = "yaml:\"allow_update_after_proposal\""]; +} + +// ConsensusState defines a solo machine consensus state. The sequence of a +// consensus state is contained in the "height" key used in storing the +// consensus state. +message ConsensusState { + option (gogoproto.goproto_getters) = false; + // public key of the solo machine + google.protobuf.Any public_key = 1 [(gogoproto.moretags) = "yaml:\"public_key\""]; + // diversifier allows the same public key to be re-used across different solo + // machine clients (potentially on different chains) without being considered + // misbehaviour. 
+ string diversifier = 2; + uint64 timestamp = 3; +} + +// Header defines a solo machine consensus header +message Header { + option (gogoproto.goproto_getters) = false; + // sequence to update solo machine public key at + uint64 sequence = 1; + uint64 timestamp = 2; + bytes signature = 3; + google.protobuf.Any new_public_key = 4 [(gogoproto.moretags) = "yaml:\"new_public_key\""]; + string new_diversifier = 5 [(gogoproto.moretags) = "yaml:\"new_diversifier\""]; +} + +// Misbehaviour defines misbehaviour for a solo machine which consists +// of a sequence and two signatures over different messages at that sequence. +message Misbehaviour { + option (gogoproto.goproto_getters) = false; + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + uint64 sequence = 2; + SignatureAndData signature_one = 3 [(gogoproto.moretags) = "yaml:\"signature_one\""]; + SignatureAndData signature_two = 4 [(gogoproto.moretags) = "yaml:\"signature_two\""]; +} + +// SignatureAndData contains a signature and the data signed over to create that +// signature. +message SignatureAndData { + option (gogoproto.goproto_getters) = false; + bytes signature = 1; + DataType data_type = 2 [(gogoproto.moretags) = "yaml:\"data_type\""]; + bytes data = 3; + uint64 timestamp = 4; +} + +// TimestampedSignatureData contains the signature data and the timestamp of the +// signature. +message TimestampedSignatureData { + option (gogoproto.goproto_getters) = false; + bytes signature_data = 1 [(gogoproto.moretags) = "yaml:\"signature_data\""]; + uint64 timestamp = 2; +} + +// SignBytes defines the signed bytes used for signature verification. +message SignBytes { + option (gogoproto.goproto_getters) = false; + + uint64 sequence = 1; + uint64 timestamp = 2; + string diversifier = 3; + // type of the data used + DataType data_type = 4 [(gogoproto.moretags) = "yaml:\"data_type\""]; + // marshaled data + bytes data = 5; +} + +// DataType defines the type of solo machine proof being created. This is done +// to preserve uniqueness of different data sign byte encodings. +enum DataType { + option (gogoproto.goproto_enum_prefix) = false; + + // Default State + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "UNSPECIFIED"]; + // Data type for client state verification + DATA_TYPE_CLIENT_STATE = 1 [(gogoproto.enumvalue_customname) = "CLIENT"]; + // Data type for consensus state verification + DATA_TYPE_CONSENSUS_STATE = 2 [(gogoproto.enumvalue_customname) = "CONSENSUS"]; + // Data type for connection state verification + DATA_TYPE_CONNECTION_STATE = 3 [(gogoproto.enumvalue_customname) = "CONNECTION"]; + // Data type for channel state verification + DATA_TYPE_CHANNEL_STATE = 4 [(gogoproto.enumvalue_customname) = "CHANNEL"]; + // Data type for packet commitment verification + DATA_TYPE_PACKET_COMMITMENT = 5 [(gogoproto.enumvalue_customname) = "PACKETCOMMITMENT"]; + // Data type for packet acknowledgement verification + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6 [(gogoproto.enumvalue_customname) = "PACKETACKNOWLEDGEMENT"]; + // Data type for packet receipt absence verification + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7 [(gogoproto.enumvalue_customname) = "PACKETRECEIPTABSENCE"]; + // Data type for next sequence recv verification + DATA_TYPE_NEXT_SEQUENCE_RECV = 8 [(gogoproto.enumvalue_customname) = "NEXTSEQUENCERECV"]; + // Data type for header verification + DATA_TYPE_HEADER = 9 [(gogoproto.enumvalue_customname) = "HEADER"]; +} + +// HeaderData returns the SignBytes data for update verification. 
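Editor's note: as a rough sketch of what a solo machine signs, the following hand-written TypeScript mirrors the SignBytes message and DataType enum above. The names and example values are assumptions for illustration; the real signing flow protobuf-encodes SignBytes via the generated code before signing.

// Illustrative mirror of SignBytes/DataType above (not the generated types).
enum DataType {
  UNSPECIFIED = 0,
  CLIENT = 1,
  CONSENSUS = 2,
  CONNECTION = 3,
  CHANNEL = 4,
  PACKETCOMMITMENT = 5,
  PACKETACKNOWLEDGEMENT = 6,
  PACKETRECEIPTABSENCE = 7,
  NEXTSEQUENCERECV = 8,
  HEADER = 9,
}

interface SignBytes {
  sequence: bigint;    // must match the client's current sequence
  timestamp: bigint;   // timestamp of the signature
  diversifier: string; // allows re-use of one public key across clients
  dataType: DataType;  // which *Data message is marshaled into `data`
  data: Uint8Array;    // protobuf-marshaled data (e.g. HeaderData bytes)
}

const toSign: SignBytes = {
  sequence: 7n,
  timestamp: 1700000000n,
  diversifier: "my-solo-machine",
  dataType: DataType.HEADER,
  data: new Uint8Array(), // marshaled HeaderData would go here
};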
+message HeaderData { + option (gogoproto.goproto_getters) = false; + + // header public key + google.protobuf.Any new_pub_key = 1 [(gogoproto.moretags) = "yaml:\"new_pub_key\""]; + // header diversifier + string new_diversifier = 2 [(gogoproto.moretags) = "yaml:\"new_diversifier\""]; +} + +// ClientStateData returns the SignBytes data for client state verification. +message ClientStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; +} + +// ConsensusStateData returns the SignBytes data for consensus state +// verification. +message ConsensusStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; +} + +// ConnectionStateData returns the SignBytes data for connection state +// verification. +message ConnectionStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + ibc.core.connection.v1.ConnectionEnd connection = 2; +} + +// ChannelStateData returns the SignBytes data for channel state +// verification. +message ChannelStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + ibc.core.channel.v1.Channel channel = 2; +} + +// PacketCommitmentData returns the SignBytes data for packet commitment +// verification. +message PacketCommitmentData { + bytes path = 1; + bytes commitment = 2; +} + +// PacketAcknowledgementData returns the SignBytes data for acknowledgement +// verification. +message PacketAcknowledgementData { + bytes path = 1; + bytes acknowledgement = 2; +} + +// PacketReceiptAbsenceData returns the SignBytes data for +// packet receipt absence verification. +message PacketReceiptAbsenceData { + bytes path = 1; +} + +// NextSequenceRecvData returns the SignBytes data for verification of the next +// sequence to be received. +message NextSequenceRecvData { + bytes path = 1; + uint64 next_seq_recv = 2 [(gogoproto.moretags) = "yaml:\"next_seq_recv\""]; +} diff --git a/packages/codegen/proto/ibc/lightclients/solomachine/v2/solomachine.proto b/packages/codegen/proto/ibc/lightclients/solomachine/v2/solomachine.proto new file mode 100644 index 00000000..0c8c638c --- /dev/null +++ b/packages/codegen/proto/ibc/lightclients/solomachine/v2/solomachine.proto @@ -0,0 +1,189 @@ +syntax = "proto3"; + +package ibc.lightclients.solomachine.v2; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/light-clients/06-solomachine/types"; + +import "ibc/core/connection/v1/connection.proto"; +import "ibc/core/channel/v1/channel.proto"; +import "gogoproto/gogo.proto"; +import "google/protobuf/any.proto"; + +// ClientState defines a solo machine client that tracks the current consensus +// state and if the client is frozen. +message ClientState { + option (gogoproto.goproto_getters) = false; + // latest sequence of the client state + uint64 sequence = 1; + // frozen sequence of the solo machine + bool is_frozen = 2 [(gogoproto.moretags) = "yaml:\"is_frozen\""]; + ConsensusState consensus_state = 3 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + // when set to true, will allow governance to update a solo machine client. + // The client will be unfrozen if it is frozen. + bool allow_update_after_proposal = 4 [(gogoproto.moretags) = "yaml:\"allow_update_after_proposal\""]; +} + +// ConsensusState defines a solo machine consensus state. 
The sequence of a +// consensus state is contained in the "height" key used in storing the +// consensus state. +message ConsensusState { + option (gogoproto.goproto_getters) = false; + // public key of the solo machine + google.protobuf.Any public_key = 1 [(gogoproto.moretags) = "yaml:\"public_key\""]; + // diversifier allows the same public key to be re-used across different solo + // machine clients (potentially on different chains) without being considered + // misbehaviour. + string diversifier = 2; + uint64 timestamp = 3; +} + +// Header defines a solo machine consensus header +message Header { + option (gogoproto.goproto_getters) = false; + // sequence to update solo machine public key at + uint64 sequence = 1; + uint64 timestamp = 2; + bytes signature = 3; + google.protobuf.Any new_public_key = 4 [(gogoproto.moretags) = "yaml:\"new_public_key\""]; + string new_diversifier = 5 [(gogoproto.moretags) = "yaml:\"new_diversifier\""]; +} + +// Misbehaviour defines misbehaviour for a solo machine which consists +// of a sequence and two signatures over different messages at that sequence. +message Misbehaviour { + option (gogoproto.goproto_getters) = false; + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + uint64 sequence = 2; + SignatureAndData signature_one = 3 [(gogoproto.moretags) = "yaml:\"signature_one\""]; + SignatureAndData signature_two = 4 [(gogoproto.moretags) = "yaml:\"signature_two\""]; +} + +// SignatureAndData contains a signature and the data signed over to create that +// signature. +message SignatureAndData { + option (gogoproto.goproto_getters) = false; + bytes signature = 1; + DataType data_type = 2 [(gogoproto.moretags) = "yaml:\"data_type\""]; + bytes data = 3; + uint64 timestamp = 4; +} + +// TimestampedSignatureData contains the signature data and the timestamp of the +// signature. +message TimestampedSignatureData { + option (gogoproto.goproto_getters) = false; + bytes signature_data = 1 [(gogoproto.moretags) = "yaml:\"signature_data\""]; + uint64 timestamp = 2; +} + +// SignBytes defines the signed bytes used for signature verification. +message SignBytes { + option (gogoproto.goproto_getters) = false; + + uint64 sequence = 1; + uint64 timestamp = 2; + string diversifier = 3; + // type of the data used + DataType data_type = 4 [(gogoproto.moretags) = "yaml:\"data_type\""]; + // marshaled data + bytes data = 5; +} + +// DataType defines the type of solo machine proof being created. This is done +// to preserve uniqueness of different data sign byte encodings. 
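The Misbehaviour message above captures the solo machine equivalent of double signing: two signatures at the same sequence over different data. A small TypeScript sketch of the structural half of that check follows; the type and helper names are illustrative, and real verification must additionally check both signatures against the client's public key. The DataType enum declared next tags which kind of payload each signed data field encodes.

// Illustrative shapes mirroring SignatureAndData and Misbehaviour above.
interface SignatureAndData {
  signature: Uint8Array;
  dataType: number;
  data: Uint8Array;
  timestamp: bigint;
}

interface Misbehaviour {
  clientId: string;
  sequence: bigint;
  signatureOne: SignatureAndData;
  signatureTwo: SignatureAndData;
}

const bytesEqual = (a: Uint8Array, b: Uint8Array): boolean =>
  a.length === b.length && a.every((v, i) => v === b[i]);

// Structural precondition only: the two signed payloads must differ, otherwise
// the same message was simply signed twice and no conflict exists.
function hasConflictingSignatures(m: Misbehaviour): boolean {
  return !bytesEqual(m.signatureOne.data, m.signatureTwo.data);
}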
+enum DataType { + option (gogoproto.goproto_enum_prefix) = false; + + // Default State + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0 [(gogoproto.enumvalue_customname) = "UNSPECIFIED"]; + // Data type for client state verification + DATA_TYPE_CLIENT_STATE = 1 [(gogoproto.enumvalue_customname) = "CLIENT"]; + // Data type for consensus state verification + DATA_TYPE_CONSENSUS_STATE = 2 [(gogoproto.enumvalue_customname) = "CONSENSUS"]; + // Data type for connection state verification + DATA_TYPE_CONNECTION_STATE = 3 [(gogoproto.enumvalue_customname) = "CONNECTION"]; + // Data type for channel state verification + DATA_TYPE_CHANNEL_STATE = 4 [(gogoproto.enumvalue_customname) = "CHANNEL"]; + // Data type for packet commitment verification + DATA_TYPE_PACKET_COMMITMENT = 5 [(gogoproto.enumvalue_customname) = "PACKETCOMMITMENT"]; + // Data type for packet acknowledgement verification + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6 [(gogoproto.enumvalue_customname) = "PACKETACKNOWLEDGEMENT"]; + // Data type for packet receipt absence verification + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7 [(gogoproto.enumvalue_customname) = "PACKETRECEIPTABSENCE"]; + // Data type for next sequence recv verification + DATA_TYPE_NEXT_SEQUENCE_RECV = 8 [(gogoproto.enumvalue_customname) = "NEXTSEQUENCERECV"]; + // Data type for header verification + DATA_TYPE_HEADER = 9 [(gogoproto.enumvalue_customname) = "HEADER"]; +} + +// HeaderData returns the SignBytes data for update verification. +message HeaderData { + option (gogoproto.goproto_getters) = false; + + // header public key + google.protobuf.Any new_pub_key = 1 [(gogoproto.moretags) = "yaml:\"new_pub_key\""]; + // header diversifier + string new_diversifier = 2 [(gogoproto.moretags) = "yaml:\"new_diversifier\""]; +} + +// ClientStateData returns the SignBytes data for client state verification. +message ClientStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; +} + +// ConsensusStateData returns the SignBytes data for consensus state +// verification. +message ConsensusStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; +} + +// ConnectionStateData returns the SignBytes data for connection state +// verification. +message ConnectionStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + ibc.core.connection.v1.ConnectionEnd connection = 2; +} + +// ChannelStateData returns the SignBytes data for channel state +// verification. +message ChannelStateData { + option (gogoproto.goproto_getters) = false; + + bytes path = 1; + ibc.core.channel.v1.Channel channel = 2; +} + +// PacketCommitmentData returns the SignBytes data for packet commitment +// verification. +message PacketCommitmentData { + bytes path = 1; + bytes commitment = 2; +} + +// PacketAcknowledgementData returns the SignBytes data for acknowledgement +// verification. +message PacketAcknowledgementData { + bytes path = 1; + bytes acknowledgement = 2; +} + +// PacketReceiptAbsenceData returns the SignBytes data for +// packet receipt absence verification. +message PacketReceiptAbsenceData { + bytes path = 1; +} + +// NextSequenceRecvData returns the SignBytes data for verification of the next +// sequence to be received. 
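Each of the *Data messages above (and NextSequenceRecvData below) pairs a commitment path with the value being proven; the marshaled message becomes the data field of SignBytes together with the matching DataType. A short TypeScript sketch of that pairing follows, using hypothetical names and leaving the encode step abstract rather than depending on the generated encoders.

// Hypothetical illustration: pairing a packet-commitment payload with its
// DataType before placing it into SignBytes.data.
interface PacketCommitmentData {
  path: Uint8Array;        // commitment path under the IBC store
  commitment: Uint8Array;  // the packet commitment being proven
}

function toSignBytesPayload(
  d: PacketCommitmentData,
  // e.g. a generated protobuf encoder such as encode(d).finish(); passed in
  // here so the sketch stays independent of the generated module's API.
  encode: (d: PacketCommitmentData) => Uint8Array,
): { dataType: number; data: Uint8Array } {
  // DATA_TYPE_PACKET_COMMITMENT = 5 in the DataType enum above.
  return { dataType: 5, data: encode(d) };
}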
+message NextSequenceRecvData { + bytes path = 1; + uint64 next_seq_recv = 2 [(gogoproto.moretags) = "yaml:\"next_seq_recv\""]; +} diff --git a/packages/codegen/proto/ibc/lightclients/tendermint/v1/tendermint.proto b/packages/codegen/proto/ibc/lightclients/tendermint/v1/tendermint.proto new file mode 100644 index 00000000..54e229b2 --- /dev/null +++ b/packages/codegen/proto/ibc/lightclients/tendermint/v1/tendermint.proto @@ -0,0 +1,115 @@ +syntax = "proto3"; + +package ibc.lightclients.tendermint.v1; + +option go_package = "github.com/cosmos/ibc-go/v2/modules/light-clients/07-tendermint/types"; + +import "tendermint/types/validator.proto"; +import "tendermint/types/types.proto"; +import "confio/proofs.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "ibc/core/client/v1/client.proto"; +import "ibc/core/commitment/v1/commitment.proto"; +import "gogoproto/gogo.proto"; + +// ClientState from Tendermint tracks the current validator set, latest height, +// and a possible frozen height. +message ClientState { + option (gogoproto.goproto_getters) = false; + + string chain_id = 1; + Fraction trust_level = 2 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"trust_level\""]; + // duration of the period since the LastestTimestamp during which the + // submitted headers are valid for upgrade + google.protobuf.Duration trusting_period = 3 + [(gogoproto.nullable) = false, (gogoproto.stdduration) = true, (gogoproto.moretags) = "yaml:\"trusting_period\""]; + // duration of the staking unbonding period + google.protobuf.Duration unbonding_period = 4 [ + (gogoproto.nullable) = false, + (gogoproto.stdduration) = true, + (gogoproto.moretags) = "yaml:\"unbonding_period\"" + ]; + // defines how much new (untrusted) header's Time can drift into the future. + google.protobuf.Duration max_clock_drift = 5 + [(gogoproto.nullable) = false, (gogoproto.stdduration) = true, (gogoproto.moretags) = "yaml:\"max_clock_drift\""]; + // Block height when the client was frozen due to a misbehaviour + ibc.core.client.v1.Height frozen_height = 6 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"frozen_height\""]; + // Latest height the client was updated to + ibc.core.client.v1.Height latest_height = 7 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"latest_height\""]; + + // Proof specifications used in verifying counterparty state + repeated ics23.ProofSpec proof_specs = 8 [(gogoproto.moretags) = "yaml:\"proof_specs\""]; + + // Path at which next upgraded client will be committed. + // Each element corresponds to the key for a single CommitmentProof in the + // chained proof. 
NOTE: ClientState must stored under + // `{upgradePath}/{upgradeHeight}/clientState` ConsensusState must be stored + // under `{upgradepath}/{upgradeHeight}/consensusState` For SDK chains using + // the default upgrade module, upgrade_path should be []string{"upgrade", + // "upgradedIBCState"}` + repeated string upgrade_path = 9 [(gogoproto.moretags) = "yaml:\"upgrade_path\""]; + + // This flag, when set to true, will allow governance to recover a client + // which has expired + bool allow_update_after_expiry = 10 [(gogoproto.moretags) = "yaml:\"allow_update_after_expiry\""]; + // This flag, when set to true, will allow governance to unfreeze a client + // whose chain has experienced a misbehaviour event + bool allow_update_after_misbehaviour = 11 [(gogoproto.moretags) = "yaml:\"allow_update_after_misbehaviour\""]; +} + +// ConsensusState defines the consensus state from Tendermint. +message ConsensusState { + option (gogoproto.goproto_getters) = false; + + // timestamp that corresponds to the block height in which the ConsensusState + // was stored. + google.protobuf.Timestamp timestamp = 1 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + // commitment root (i.e app hash) + ibc.core.commitment.v1.MerkleRoot root = 2 [(gogoproto.nullable) = false]; + bytes next_validators_hash = 3 [ + (gogoproto.casttype) = "github.com/tendermint/tendermint/libs/bytes.HexBytes", + (gogoproto.moretags) = "yaml:\"next_validators_hash\"" + ]; +} + +// Misbehaviour is a wrapper over two conflicting Headers +// that implements Misbehaviour interface expected by ICS-02 +message Misbehaviour { + option (gogoproto.goproto_getters) = false; + + string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; + Header header_1 = 2 [(gogoproto.customname) = "Header1", (gogoproto.moretags) = "yaml:\"header_1\""]; + Header header_2 = 3 [(gogoproto.customname) = "Header2", (gogoproto.moretags) = "yaml:\"header_2\""]; +} + +// Header defines the Tendermint client consensus Header. +// It encapsulates all the information necessary to update from a trusted +// Tendermint ConsensusState. The inclusion of TrustedHeight and +// TrustedValidators allows this update to process correctly, so long as the +// ConsensusState for the TrustedHeight exists, this removes race conditions +// among relayers The SignedHeader and ValidatorSet are the new untrusted update +// fields for the client. The TrustedHeight is the height of a stored +// ConsensusState on the client that will be used to verify the new untrusted +// header. The Trusted ConsensusState must be within the unbonding period of +// current time in order to correctly verify, and the TrustedValidators must +// hash to TrustedConsensusState.NextValidatorsHash since that is the last +// trusted validator set at the TrustedHeight. +message Header { + .tendermint.types.SignedHeader signed_header = 1 + [(gogoproto.embed) = true, (gogoproto.moretags) = "yaml:\"signed_header\""]; + + .tendermint.types.ValidatorSet validator_set = 2 [(gogoproto.moretags) = "yaml:\"validator_set\""]; + ibc.core.client.v1.Height trusted_height = 3 + [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"trusted_height\""]; + .tendermint.types.ValidatorSet trusted_validators = 4 [(gogoproto.moretags) = "yaml:\"trusted_validators\""]; +} + +// Fraction defines the protobuf message type for tmmath.Fraction that only +// supports positive values. 
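The ClientState above bounds how long a stored ConsensusState remains usable: a header update must land within trusting_period of the trusted consensus state's timestamp (and within max_clock_drift of local time). A minimal TypeScript sketch of the expiry part of that check follows, with all names and the millisecond units being illustrative assumptions; the generated types carry protobuf Duration and Timestamp objects instead. Fraction, declared next, is the numerator/denominator pair referenced by the trust_level field of ClientState above.

// Illustrative only: durations and timestamps are taken as milliseconds here.
function isWithinTrustingPeriod(
  consensusStateTimeMs: number, // timestamp of the trusted ConsensusState
  trustingPeriodMs: number,     // ClientState.trusting_period
  nowMs: number = Date.now(),
): boolean {
  return nowMs < consensusStateTimeMs + trustingPeriodMs;
}

// Example: a consensus state stored 10 days ago with a 14-day trusting period
// can still serve as the trusted basis for verifying a new header.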
+message Fraction { + uint64 numerator = 1; + uint64 denominator = 2; +} diff --git a/packages/codegen/proto/tendermint/LICENSE b/packages/codegen/proto/tendermint/LICENSE new file mode 100644 index 00000000..eaf92fbf --- /dev/null +++ b/packages/codegen/proto/tendermint/LICENSE @@ -0,0 +1,204 @@ +Tendermint Core +License: Apache2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 All in Bits, Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/packages/codegen/proto/tendermint/README.md b/packages/codegen/proto/tendermint/README.md new file mode 100644 index 00000000..74fcf8b8 --- /dev/null +++ b/packages/codegen/proto/tendermint/README.md @@ -0,0 +1 @@ +# tendermint \ No newline at end of file diff --git a/packages/codegen/proto/tendermint/abci/types.proto b/packages/codegen/proto/tendermint/abci/types.proto new file mode 100644 index 00000000..d41a5226 --- /dev/null +++ b/packages/codegen/proto/tendermint/abci/types.proto @@ -0,0 +1,394 @@ +syntax = "proto3"; +package tendermint.abci; + +option go_package = "github.com/tendermint/tendermint/abci/types"; + +// For more information on gogo.proto, see: +// https://github.com/gogo/protobuf/blob/master/extensions.md +import "tendermint/crypto/proof.proto"; +import "tendermint/types/types.proto"; +import "tendermint/crypto/keys.proto"; +import "tendermint/types/params.proto"; +import "google/protobuf/timestamp.proto"; +import "gogoproto/gogo.proto"; + +// This file is copied from http://github.com/tendermint/abci +// NOTE: When using custom types, mind the warnings. 
+// https://github.com/gogo/protobuf/blob/master/custom_types.md#warnings-and-issues + +//---------------------------------------- +// Request types + +message Request { + oneof value { + RequestEcho echo = 1; + RequestFlush flush = 2; + RequestInfo info = 3; + RequestSetOption set_option = 4; + RequestInitChain init_chain = 5; + RequestQuery query = 6; + RequestBeginBlock begin_block = 7; + RequestCheckTx check_tx = 8; + RequestDeliverTx deliver_tx = 9; + RequestEndBlock end_block = 10; + RequestCommit commit = 11; + RequestListSnapshots list_snapshots = 12; + RequestOfferSnapshot offer_snapshot = 13; + RequestLoadSnapshotChunk load_snapshot_chunk = 14; + RequestApplySnapshotChunk apply_snapshot_chunk = 15; + } +} + +message RequestEcho { + string message = 1; +} + +message RequestFlush {} + +message RequestInfo { + string version = 1; + uint64 block_version = 2; + uint64 p2p_version = 3; +} + +// nondeterministic +message RequestSetOption { + string key = 1; + string value = 2; +} + +message RequestInitChain { + google.protobuf.Timestamp time = 1 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + string chain_id = 2; + ConsensusParams consensus_params = 3; + repeated ValidatorUpdate validators = 4 [(gogoproto.nullable) = false]; + bytes app_state_bytes = 5; + int64 initial_height = 6; +} + +message RequestQuery { + bytes data = 1; + string path = 2; + int64 height = 3; + bool prove = 4; +} + +message RequestBeginBlock { + bytes hash = 1; + tendermint.types.Header header = 2 [(gogoproto.nullable) = false]; + LastCommitInfo last_commit_info = 3 [(gogoproto.nullable) = false]; + repeated Evidence byzantine_validators = 4 [(gogoproto.nullable) = false]; +} + +enum CheckTxType { + NEW = 0 [(gogoproto.enumvalue_customname) = "New"]; + RECHECK = 1 [(gogoproto.enumvalue_customname) = "Recheck"]; +} + +message RequestCheckTx { + bytes tx = 1; + CheckTxType type = 2; +} + +message RequestDeliverTx { + bytes tx = 1; +} + +message RequestEndBlock { + int64 height = 1; +} + +message RequestCommit {} + +// lists available snapshots +message RequestListSnapshots {} + +// offers a snapshot to the application +message RequestOfferSnapshot { + Snapshot snapshot = 1; // snapshot offered by peers + bytes app_hash = 2; // light client-verified app hash for snapshot height +} + +// loads a snapshot chunk +message RequestLoadSnapshotChunk { + uint64 height = 1; + uint32 format = 2; + uint32 chunk = 3; +} + +// Applies a snapshot chunk +message RequestApplySnapshotChunk { + uint32 index = 1; + bytes chunk = 2; + string sender = 3; +} + +//---------------------------------------- +// Response types + +message Response { + oneof value { + ResponseException exception = 1; + ResponseEcho echo = 2; + ResponseFlush flush = 3; + ResponseInfo info = 4; + ResponseSetOption set_option = 5; + ResponseInitChain init_chain = 6; + ResponseQuery query = 7; + ResponseBeginBlock begin_block = 8; + ResponseCheckTx check_tx = 9; + ResponseDeliverTx deliver_tx = 10; + ResponseEndBlock end_block = 11; + ResponseCommit commit = 12; + ResponseListSnapshots list_snapshots = 13; + ResponseOfferSnapshot offer_snapshot = 14; + ResponseLoadSnapshotChunk load_snapshot_chunk = 15; + ResponseApplySnapshotChunk apply_snapshot_chunk = 16; + } +} + +// nondeterministic +message ResponseException { + string error = 1; +} + +message ResponseEcho { + string message = 1; +} + +message ResponseFlush {} + +message ResponseInfo { + string data = 1; + + string version = 2; + uint64 app_version = 3; + + int64 last_block_height = 4; + bytes 
last_block_app_hash = 5; +} + +// nondeterministic +message ResponseSetOption { + uint32 code = 1; + // bytes data = 2; + string log = 3; + string info = 4; +} + +message ResponseInitChain { + ConsensusParams consensus_params = 1; + repeated ValidatorUpdate validators = 2 [(gogoproto.nullable) = false]; + bytes app_hash = 3; +} + +message ResponseQuery { + uint32 code = 1; + // bytes data = 2; // use "value" instead. + string log = 3; // nondeterministic + string info = 4; // nondeterministic + int64 index = 5; + bytes key = 6; + bytes value = 7; + tendermint.crypto.ProofOps proof_ops = 8; + int64 height = 9; + string codespace = 10; +} + +message ResponseBeginBlock { + repeated Event events = 1 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; +} + +message ResponseCheckTx { + uint32 code = 1; + bytes data = 2; + string log = 3; // nondeterministic + string info = 4; // nondeterministic + int64 gas_wanted = 5 [json_name = "gas_wanted"]; + int64 gas_used = 6 [json_name = "gas_used"]; + repeated Event events = 7 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; + string codespace = 8; +} + +message ResponseDeliverTx { + uint32 code = 1; + bytes data = 2; + string log = 3; // nondeterministic + string info = 4; // nondeterministic + int64 gas_wanted = 5 [json_name = "gas_wanted"]; + int64 gas_used = 6 [json_name = "gas_used"]; + repeated Event events = 7 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; + string codespace = 8; +} + +message ResponseEndBlock { + repeated ValidatorUpdate validator_updates = 1 [(gogoproto.nullable) = false]; + ConsensusParams consensus_param_updates = 2; + repeated Event events = 3 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; +} + +message ResponseCommit { + // reserve 1 + bytes data = 2; + int64 retain_height = 3; +} + +message ResponseListSnapshots { + repeated Snapshot snapshots = 1; +} + +message ResponseOfferSnapshot { + Result result = 1; + + enum Result { + UNKNOWN = 0; // Unknown result, abort all snapshot restoration + ACCEPT = 1; // Snapshot accepted, apply chunks + ABORT = 2; // Abort all snapshot restoration + REJECT = 3; // Reject this specific snapshot, try others + REJECT_FORMAT = 4; // Reject all snapshots of this format, try others + REJECT_SENDER = 5; // Reject all snapshots from the sender(s), try others + } +} + +message ResponseLoadSnapshotChunk { + bytes chunk = 1; +} + +message ResponseApplySnapshotChunk { + Result result = 1; + repeated uint32 refetch_chunks = 2; // Chunks to refetch and reapply + repeated string reject_senders = 3; // Chunk senders to reject and ban + + enum Result { + UNKNOWN = 0; // Unknown result, abort all snapshot restoration + ACCEPT = 1; // Chunk successfully accepted + ABORT = 2; // Abort all snapshot restoration + RETRY = 3; // Retry chunk (combine with refetch and reject) + RETRY_SNAPSHOT = 4; // Retry snapshot (combine with refetch and reject) + REJECT_SNAPSHOT = 5; // Reject this snapshot, try others + } +} + +//---------------------------------------- +// Misc. + +// ConsensusParams contains all consensus-relevant parameters +// that can be adjusted by the abci app +message ConsensusParams { + BlockParams block = 1; + tendermint.types.EvidenceParams evidence = 2; + tendermint.types.ValidatorParams validator = 3; + tendermint.types.VersionParams version = 4; +} + +// BlockParams contains limits on the block size. 
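The Request and Response messages above are oneof wrappers around the individual ABCI calls. In generated TypeScript a oneof usually surfaces as a set of optional fields, exactly one of which is set, so a consumer dispatches by checking which field is present. A hedged sketch of that pattern follows; the field names assume the camelCase convention and this is not the generated interface itself. BlockParams, declared next, carries the max_bytes and max_gas limits referenced by the ConsensusParams message above.

// Illustrative subset of the Response oneof as optional fields.
interface Response {
  echo?: { message: string };
  info?: { data: string; version: string; lastBlockHeight: bigint };
  checkTx?: { code: number; log: string; gasWanted: bigint; gasUsed: bigint };
  // ...remaining variants elided
}

function describeResponse(r: Response): string {
  // Exactly one variant should be set; check in a fixed order.
  if (r.echo) return `echo: ${r.echo.message}`;
  if (r.info) return `info: version ${r.info.version}`;
  if (r.checkTx) return `checkTx: code ${r.checkTx.code}`;
  return "unknown or unset response variant";
}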
+message BlockParams { + // Note: must be greater than 0 + int64 max_bytes = 1; + // Note: must be greater or equal to -1 + int64 max_gas = 2; +} + +message LastCommitInfo { + int32 round = 1; + repeated VoteInfo votes = 2 [(gogoproto.nullable) = false]; +} + +// Event allows application developers to attach additional information to +// ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. +// Later, transactions may be queried using these events. +message Event { + string type = 1; + repeated EventAttribute attributes = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "attributes,omitempty"]; +} + +// EventAttribute is a single key-value pair, associated with an event. +message EventAttribute { + bytes key = 1; + bytes value = 2; + bool index = 3; // nondeterministic +} + +// TxResult contains results of executing the transaction. +// +// One usage is indexing transaction results. +message TxResult { + int64 height = 1; + uint32 index = 2; + bytes tx = 3; + ResponseDeliverTx result = 4 [(gogoproto.nullable) = false]; +} + +//---------------------------------------- +// Blockchain Types + +// Validator +message Validator { + bytes address = 1; // The first 20 bytes of SHA256(public key) + // PubKey pub_key = 2 [(gogoproto.nullable)=false]; + int64 power = 3; // The voting power +} + +// ValidatorUpdate +message ValidatorUpdate { + tendermint.crypto.PublicKey pub_key = 1 [(gogoproto.nullable) = false]; + int64 power = 2; +} + +// VoteInfo +message VoteInfo { + Validator validator = 1 [(gogoproto.nullable) = false]; + bool signed_last_block = 2; +} + +enum EvidenceType { + UNKNOWN = 0; + DUPLICATE_VOTE = 1; + LIGHT_CLIENT_ATTACK = 2; +} + +message Evidence { + EvidenceType type = 1; + // The offending validator + Validator validator = 2 [(gogoproto.nullable) = false]; + // The height when the offense occurred + int64 height = 3; + // The corresponding time where the offense occurred + google.protobuf.Timestamp time = 4 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + // Total voting power of the validator set in case the ABCI application does + // not store historical validators. 
+ // https://github.com/tendermint/tendermint/issues/4581 + int64 total_voting_power = 5; +} + +//---------------------------------------- +// State Sync Types + +message Snapshot { + uint64 height = 1; // The height at which the snapshot was taken + uint32 format = 2; // The application-specific snapshot format + uint32 chunks = 3; // Number of chunks in the snapshot + bytes hash = 4; // Arbitrary snapshot hash, equal only if identical + bytes metadata = 5; // Arbitrary application metadata +} + +//---------------------------------------- +// Service Definition + +service ABCIApplication { + rpc Echo(RequestEcho) returns (ResponseEcho); + rpc Flush(RequestFlush) returns (ResponseFlush); + rpc Info(RequestInfo) returns (ResponseInfo); + rpc SetOption(RequestSetOption) returns (ResponseSetOption); + rpc DeliverTx(RequestDeliverTx) returns (ResponseDeliverTx); + rpc CheckTx(RequestCheckTx) returns (ResponseCheckTx); + rpc Query(RequestQuery) returns (ResponseQuery); + rpc Commit(RequestCommit) returns (ResponseCommit); + rpc InitChain(RequestInitChain) returns (ResponseInitChain); + rpc BeginBlock(RequestBeginBlock) returns (ResponseBeginBlock); + rpc EndBlock(RequestEndBlock) returns (ResponseEndBlock); + rpc ListSnapshots(RequestListSnapshots) returns (ResponseListSnapshots); + rpc OfferSnapshot(RequestOfferSnapshot) returns (ResponseOfferSnapshot); + rpc LoadSnapshotChunk(RequestLoadSnapshotChunk) returns (ResponseLoadSnapshotChunk); + rpc ApplySnapshotChunk(RequestApplySnapshotChunk) returns (ResponseApplySnapshotChunk); +} diff --git a/packages/codegen/proto/tendermint/crypto/keys.proto b/packages/codegen/proto/tendermint/crypto/keys.proto new file mode 100644 index 00000000..16fd7adf --- /dev/null +++ b/packages/codegen/proto/tendermint/crypto/keys.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; +package tendermint.crypto; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/crypto"; + +import "gogoproto/gogo.proto"; + +// PublicKey defines the keys available for use with Tendermint Validators +message PublicKey { + option (gogoproto.compare) = true; + option (gogoproto.equal) = true; + + oneof sum { + bytes ed25519 = 1; + bytes secp256k1 = 2; + } +} diff --git a/packages/codegen/proto/tendermint/crypto/proof.proto b/packages/codegen/proto/tendermint/crypto/proof.proto new file mode 100644 index 00000000..975df768 --- /dev/null +++ b/packages/codegen/proto/tendermint/crypto/proof.proto @@ -0,0 +1,41 @@ +syntax = "proto3"; +package tendermint.crypto; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/crypto"; + +import "gogoproto/gogo.proto"; + +message Proof { + int64 total = 1; + int64 index = 2; + bytes leaf_hash = 3; + repeated bytes aunts = 4; +} + +message ValueOp { + // Encoded in ProofOp.Key. 
+ bytes key = 1; + + // To encode in ProofOp.Data + Proof proof = 2; +} + +message DominoOp { + string key = 1; + string input = 2; + string output = 3; +} + +// ProofOp defines an operation used for calculating Merkle root +// The data could be arbitrary format, providing nessecary data +// for example neighbouring node hash +message ProofOp { + string type = 1; + bytes key = 2; + bytes data = 3; +} + +// ProofOps is Merkle proof defined by the list of ProofOps +message ProofOps { + repeated ProofOp ops = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/tendermint/libs/bits/types.proto b/packages/codegen/proto/tendermint/libs/bits/types.proto new file mode 100644 index 00000000..3111d113 --- /dev/null +++ b/packages/codegen/proto/tendermint/libs/bits/types.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; +package tendermint.libs.bits; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/libs/bits"; + +message BitArray { + int64 bits = 1; + repeated uint64 elems = 2; +} diff --git a/packages/codegen/proto/tendermint/p2p/types.proto b/packages/codegen/proto/tendermint/p2p/types.proto new file mode 100644 index 00000000..216a6d8d --- /dev/null +++ b/packages/codegen/proto/tendermint/p2p/types.proto @@ -0,0 +1,42 @@ +syntax = "proto3"; +package tendermint.p2p; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/p2p"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; + +message ProtocolVersion { + uint64 p2p = 1 [(gogoproto.customname) = "P2P"]; + uint64 block = 2; + uint64 app = 3; +} + +message NodeInfo { + ProtocolVersion protocol_version = 1 [(gogoproto.nullable) = false]; + string node_id = 2 [(gogoproto.customname) = "NodeID"]; + string listen_addr = 3; + string network = 4; + string version = 5; + bytes channels = 6; + string moniker = 7; + NodeInfoOther other = 8 [(gogoproto.nullable) = false]; +} + +message NodeInfoOther { + string tx_index = 1; + string rpc_address = 2 [(gogoproto.customname) = "RPCAddress"]; +} + +message PeerInfo { + string id = 1 [(gogoproto.customname) = "ID"]; + repeated PeerAddressInfo address_info = 2; + google.protobuf.Timestamp last_connected = 3 [(gogoproto.stdtime) = true]; +} + +message PeerAddressInfo { + string address = 1; + google.protobuf.Timestamp last_dial_success = 2 [(gogoproto.stdtime) = true]; + google.protobuf.Timestamp last_dial_failure = 3 [(gogoproto.stdtime) = true]; + uint32 dial_failures = 4; +} diff --git a/packages/codegen/proto/tendermint/types/block.proto b/packages/codegen/proto/tendermint/types/block.proto new file mode 100644 index 00000000..84e9bb15 --- /dev/null +++ b/packages/codegen/proto/tendermint/types/block.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; +package tendermint.types; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; + +import "gogoproto/gogo.proto"; +import "tendermint/types/types.proto"; +import "tendermint/types/evidence.proto"; + +message Block { + Header header = 1 [(gogoproto.nullable) = false]; + Data data = 2 [(gogoproto.nullable) = false]; + tendermint.types.EvidenceList evidence = 3 [(gogoproto.nullable) = false]; + Commit last_commit = 4; +} diff --git a/packages/codegen/proto/tendermint/types/evidence.proto b/packages/codegen/proto/tendermint/types/evidence.proto new file mode 100644 index 00000000..d9548a43 --- /dev/null +++ b/packages/codegen/proto/tendermint/types/evidence.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; +package tendermint.types; + +option go_package = 
"github.com/tendermint/tendermint/proto/tendermint/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "tendermint/types/types.proto"; +import "tendermint/types/validator.proto"; + +message Evidence { + oneof sum { + DuplicateVoteEvidence duplicate_vote_evidence = 1; + LightClientAttackEvidence light_client_attack_evidence = 2; + } +} + +// DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. +message DuplicateVoteEvidence { + tendermint.types.Vote vote_a = 1; + tendermint.types.Vote vote_b = 2; + int64 total_voting_power = 3; + int64 validator_power = 4; + google.protobuf.Timestamp timestamp = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +// LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. +message LightClientAttackEvidence { + tendermint.types.LightBlock conflicting_block = 1; + int64 common_height = 2; + repeated tendermint.types.Validator byzantine_validators = 3; + int64 total_voting_power = 4; + google.protobuf.Timestamp timestamp = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; +} + +message EvidenceList { + repeated Evidence evidence = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/codegen/proto/tendermint/types/params.proto b/packages/codegen/proto/tendermint/types/params.proto new file mode 100644 index 00000000..70789222 --- /dev/null +++ b/packages/codegen/proto/tendermint/types/params.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; +package tendermint.types; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; + +option (gogoproto.equal_all) = true; + +// ConsensusParams contains consensus critical parameters that determine the +// validity of blocks. +message ConsensusParams { + BlockParams block = 1 [(gogoproto.nullable) = false]; + EvidenceParams evidence = 2 [(gogoproto.nullable) = false]; + ValidatorParams validator = 3 [(gogoproto.nullable) = false]; + VersionParams version = 4 [(gogoproto.nullable) = false]; +} + +// BlockParams contains limits on the block size. +message BlockParams { + // Max block size, in bytes. + // Note: must be greater than 0 + int64 max_bytes = 1; + // Max gas per block. + // Note: must be greater or equal to -1 + int64 max_gas = 2; + // Minimum time increment between consecutive blocks (in milliseconds) If the + // block header timestamp is ahead of the system clock, decrease this value. + // + // Not exposed to the application. + int64 time_iota_ms = 3; +} + +// EvidenceParams determine how we handle evidence of malfeasance. +message EvidenceParams { + // Max age of evidence, in blocks. + // + // The basic formula for calculating this is: MaxAgeDuration / {average block + // time}. + int64 max_age_num_blocks = 1; + + // Max age of evidence, in time. + // + // It should correspond with an app's "unbonding period" or other similar + // mechanism for handling [Nothing-At-Stake + // attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + google.protobuf.Duration max_age_duration = 2 [(gogoproto.nullable) = false, (gogoproto.stdduration) = true]; + + // This sets the maximum size of total evidence in bytes that can be committed in a single block. + // and should fall comfortably under the max block bytes. 
+ // Default is 1048576 or 1MB + int64 max_bytes = 3; +} + +// ValidatorParams restrict the public key types validators can use. +// NOTE: uses ABCI pubkey naming, not Amino names. +message ValidatorParams { + option (gogoproto.populate) = true; + option (gogoproto.equal) = true; + + repeated string pub_key_types = 1; +} + +// VersionParams contains the ABCI application version. +message VersionParams { + option (gogoproto.populate) = true; + option (gogoproto.equal) = true; + + uint64 app_version = 1; +} + +// HashedParams is a subset of ConsensusParams. +// +// It is hashed into the Header.ConsensusHash. +message HashedParams { + int64 block_max_bytes = 1; + int64 block_max_gas = 2; +} diff --git a/packages/codegen/proto/tendermint/types/types.proto b/packages/codegen/proto/tendermint/types/types.proto new file mode 100644 index 00000000..57efc33c --- /dev/null +++ b/packages/codegen/proto/tendermint/types/types.proto @@ -0,0 +1,153 @@ +syntax = "proto3"; +package tendermint.types; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; + +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +import "tendermint/crypto/proof.proto"; +import "tendermint/version/types.proto"; +import "tendermint/types/validator.proto"; + +// BlockIdFlag indicates which BlcokID the signature is for +enum BlockIDFlag { + option (gogoproto.goproto_enum_stringer) = true; + option (gogoproto.goproto_enum_prefix) = false; + + BLOCK_ID_FLAG_UNKNOWN = 0 [(gogoproto.enumvalue_customname) = "BlockIDFlagUnknown"]; + BLOCK_ID_FLAG_ABSENT = 1 [(gogoproto.enumvalue_customname) = "BlockIDFlagAbsent"]; + BLOCK_ID_FLAG_COMMIT = 2 [(gogoproto.enumvalue_customname) = "BlockIDFlagCommit"]; + BLOCK_ID_FLAG_NIL = 3 [(gogoproto.enumvalue_customname) = "BlockIDFlagNil"]; +} + +// SignedMsgType is a type of signed message in the consensus. +enum SignedMsgType { + option (gogoproto.goproto_enum_stringer) = true; + option (gogoproto.goproto_enum_prefix) = false; + + SIGNED_MSG_TYPE_UNKNOWN = 0 [(gogoproto.enumvalue_customname) = "UnknownType"]; + // Votes + SIGNED_MSG_TYPE_PREVOTE = 1 [(gogoproto.enumvalue_customname) = "PrevoteType"]; + SIGNED_MSG_TYPE_PRECOMMIT = 2 [(gogoproto.enumvalue_customname) = "PrecommitType"]; + + // Proposals + SIGNED_MSG_TYPE_PROPOSAL = 32 [(gogoproto.enumvalue_customname) = "ProposalType"]; +} + +// PartsetHeader +message PartSetHeader { + uint32 total = 1; + bytes hash = 2; +} + +message Part { + uint32 index = 1; + bytes bytes = 2; + tendermint.crypto.Proof proof = 3 [(gogoproto.nullable) = false]; +} + +// BlockID +message BlockID { + bytes hash = 1; + PartSetHeader part_set_header = 2 [(gogoproto.nullable) = false]; +} + +// -------------------------------- + +// Header defines the structure of a Tendermint block header. 
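For EvidenceParams in params.proto above, the comment gives the rule of thumb max_age_num_blocks ≈ max_age_duration / average block time, with the duration meant to track the application's unbonding period. A tiny TypeScript sketch of that arithmetic follows; the units (milliseconds) and names are illustrative. The Header message declared next is the block header whose consensus_hash commits to these parameters via HashedParams above.

// Rule-of-thumb helper for choosing max_age_num_blocks from max_age_duration.
function suggestedMaxAgeNumBlocks(
  maxAgeDurationMs: number, // EvidenceParams.max_age_duration
  avgBlockTimeMs: number,   // observed average block time of the chain
): number {
  return Math.floor(maxAgeDurationMs / avgBlockTimeMs);
}

// Example: a 21-day max_age_duration with ~6s blocks suggests roughly
// Math.floor(21 * 24 * 60 * 60 * 1000 / 6000) = 302400 blocks.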
+message Header { + // basic block info + tendermint.version.Consensus version = 1 [(gogoproto.nullable) = false]; + string chain_id = 2 [(gogoproto.customname) = "ChainID"]; + int64 height = 3; + google.protobuf.Timestamp time = 4 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + + // prev block info + BlockID last_block_id = 5 [(gogoproto.nullable) = false]; + + // hashes of block data + bytes last_commit_hash = 6; // commit from validators from the last block + bytes data_hash = 7; // transactions + + // hashes from the app output from the prev block + bytes validators_hash = 8; // validators for the current block + bytes next_validators_hash = 9; // validators for the next block + bytes consensus_hash = 10; // consensus params for current block + bytes app_hash = 11; // state after txs from the previous block + bytes last_results_hash = 12; // root hash of all results from the txs from the previous block + + // consensus info + bytes evidence_hash = 13; // evidence included in the block + bytes proposer_address = 14; // original proposer of the block +} + +// Data contains the set of transactions included in the block +message Data { + // Txs that will be applied by state @ block.Height+1. + // NOTE: not all txs here are valid. We're just agreeing on the order first. + // This means that block.AppHash does not include these txs. + repeated bytes txs = 1; +} + +// Vote represents a prevote, precommit, or commit vote from validators for +// consensus. +message Vote { + SignedMsgType type = 1; + int64 height = 2; + int32 round = 3; + BlockID block_id = 4 [(gogoproto.nullable) = false, (gogoproto.customname) = "BlockID"]; // zero if vote is nil. + google.protobuf.Timestamp timestamp = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + bytes validator_address = 6; + int32 validator_index = 7; + bytes signature = 8; +} + +// Commit contains the evidence that a block was committed by a set of validators. +message Commit { + int64 height = 1; + int32 round = 2; + BlockID block_id = 3 [(gogoproto.nullable) = false, (gogoproto.customname) = "BlockID"]; + repeated CommitSig signatures = 4 [(gogoproto.nullable) = false]; +} + +// CommitSig is a part of the Vote included in a Commit. +message CommitSig { + BlockIDFlag block_id_flag = 1; + bytes validator_address = 2; + google.protobuf.Timestamp timestamp = 3 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + bytes signature = 4; +} + +message Proposal { + SignedMsgType type = 1; + int64 height = 2; + int32 round = 3; + int32 pol_round = 4; + BlockID block_id = 5 [(gogoproto.customname) = "BlockID", (gogoproto.nullable) = false]; + google.protobuf.Timestamp timestamp = 6 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; + bytes signature = 7; +} + +message SignedHeader { + Header header = 1; + Commit commit = 2; +} + +message LightBlock { + SignedHeader signed_header = 1; + tendermint.types.ValidatorSet validator_set = 2; +} + +message BlockMeta { + BlockID block_id = 1 [(gogoproto.customname) = "BlockID", (gogoproto.nullable) = false]; + int64 block_size = 2; + Header header = 3 [(gogoproto.nullable) = false]; + int64 num_txs = 4; +} + +// TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. 
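SignedHeader above pairs a Header with the Commit that finalized it, and LightBlock adds the ValidatorSet that produced that commit. A minimal TypeScript sketch of the cheapest consistency check a consumer can make on such a pair follows, with the shapes trimmed to the fields used; these are illustrative, not the generated types. TxProof, declared next, is the Merkle proof that ties an individual transaction back to a block's data hash.

// Trimmed, illustrative shapes of Header, Commit, and SignedHeader.
interface Header { chainId: string; height: bigint }
interface Commit { height: bigint; round: number }
interface SignedHeader { header?: Header; commit?: Commit }

// Sanity check only: both halves must be present and agree on the height.
// Full verification also checks the commit signatures against the validator set.
function headerAndCommitMatch(sh: SignedHeader): boolean {
  return (
    sh.header !== undefined &&
    sh.commit !== undefined &&
    sh.header.height === sh.commit.height
  );
}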
+message TxProof { + bytes root_hash = 1; + bytes data = 2; + tendermint.crypto.Proof proof = 3; +} diff --git a/packages/codegen/proto/tendermint/types/validator.proto b/packages/codegen/proto/tendermint/types/validator.proto new file mode 100644 index 00000000..49860b96 --- /dev/null +++ b/packages/codegen/proto/tendermint/types/validator.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package tendermint.types; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; + +import "gogoproto/gogo.proto"; +import "tendermint/crypto/keys.proto"; + +message ValidatorSet { + repeated Validator validators = 1; + Validator proposer = 2; + int64 total_voting_power = 3; +} + +message Validator { + bytes address = 1; + tendermint.crypto.PublicKey pub_key = 2 [(gogoproto.nullable) = false]; + int64 voting_power = 3; + int64 proposer_priority = 4; +} + +message SimpleValidator { + tendermint.crypto.PublicKey pub_key = 1; + int64 voting_power = 2; +} diff --git a/packages/codegen/proto/tendermint/version/types.proto b/packages/codegen/proto/tendermint/version/types.proto new file mode 100644 index 00000000..6061868b --- /dev/null +++ b/packages/codegen/proto/tendermint/version/types.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; +package tendermint.version; + +option go_package = "github.com/tendermint/tendermint/proto/tendermint/version"; + +import "gogoproto/gogo.proto"; + +// App includes the protocol and software version for the application. +// This information is included in ResponseInfo. The App.Protocol can be +// updated in ResponseEndBlock. +message App { + uint64 protocol = 1; + string software = 2; +} + +// Consensus captures the consensus rules for processing a block in the blockchain, +// including all blockchain data structures and the rules of the application's +// state transition machine. +message Consensus { + option (gogoproto.equal) = true; + + uint64 block = 1; + uint64 app = 2; +} diff --git a/packages/codegen/src/amino/amino.ts b/packages/codegen/src/amino/amino.ts new file mode 100644 index 00000000..693da49f --- /dev/null +++ b/packages/codegen/src/amino/amino.ts @@ -0,0 +1 @@ +export {} \ No newline at end of file diff --git a/packages/codegen/src/amino/bundle.ts b/packages/codegen/src/amino/bundle.ts new file mode 100644 index 00000000..8ddc5409 --- /dev/null +++ b/packages/codegen/src/amino/bundle.ts @@ -0,0 +1,3 @@ +import * as _0 from "./amino"; +export const amino = { ..._0 +}; \ No newline at end of file diff --git a/packages/codegen/src/confio/proofs.ts b/packages/codegen/src/confio/proofs.ts new file mode 100644 index 00000000..76045f81 --- /dev/null +++ b/packages/codegen/src/confio/proofs.ts @@ -0,0 +1,1451 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../helpers"; +export enum HashOp { + /** NO_HASH - NO_HASH is the default if no data passed. Note this is an illegal argument some places. 
*/ + NO_HASH = 0, + SHA256 = 1, + SHA512 = 2, + KECCAK = 3, + RIPEMD160 = 4, + + /** BITCOIN - ripemd160(sha256(x)) */ + BITCOIN = 5, + UNRECOGNIZED = -1, +} +export const HashOpSDKType = HashOp; +export function hashOpFromJSON(object: any): HashOp { + switch (object) { + case 0: + case "NO_HASH": + return HashOp.NO_HASH; + + case 1: + case "SHA256": + return HashOp.SHA256; + + case 2: + case "SHA512": + return HashOp.SHA512; + + case 3: + case "KECCAK": + return HashOp.KECCAK; + + case 4: + case "RIPEMD160": + return HashOp.RIPEMD160; + + case 5: + case "BITCOIN": + return HashOp.BITCOIN; + + case -1: + case "UNRECOGNIZED": + default: + return HashOp.UNRECOGNIZED; + } +} +export function hashOpToJSON(object: HashOp): string { + switch (object) { + case HashOp.NO_HASH: + return "NO_HASH"; + + case HashOp.SHA256: + return "SHA256"; + + case HashOp.SHA512: + return "SHA512"; + + case HashOp.KECCAK: + return "KECCAK"; + + case HashOp.RIPEMD160: + return "RIPEMD160"; + + case HashOp.BITCOIN: + return "BITCOIN"; + + case HashOp.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * LengthOp defines how to process the key and value of the LeafOp + * to include length information. After encoding the length with the given + * algorithm, the length will be prepended to the key and value bytes. + * (Each one with it's own encoded length) + */ + +export enum LengthOp { + /** NO_PREFIX - NO_PREFIX don't include any length info */ + NO_PREFIX = 0, + + /** VAR_PROTO - VAR_PROTO uses protobuf (and go-amino) varint encoding of the length */ + VAR_PROTO = 1, + + /** VAR_RLP - VAR_RLP uses rlp int encoding of the length */ + VAR_RLP = 2, + + /** FIXED32_BIG - FIXED32_BIG uses big-endian encoding of the length as a 32 bit integer */ + FIXED32_BIG = 3, + + /** FIXED32_LITTLE - FIXED32_LITTLE uses little-endian encoding of the length as a 32 bit integer */ + FIXED32_LITTLE = 4, + + /** FIXED64_BIG - FIXED64_BIG uses big-endian encoding of the length as a 64 bit integer */ + FIXED64_BIG = 5, + + /** FIXED64_LITTLE - FIXED64_LITTLE uses little-endian encoding of the length as a 64 bit integer */ + FIXED64_LITTLE = 6, + + /** REQUIRE_32_BYTES - REQUIRE_32_BYTES is like NONE, but will fail if the input is not exactly 32 bytes (sha256 output) */ + REQUIRE_32_BYTES = 7, + + /** REQUIRE_64_BYTES - REQUIRE_64_BYTES is like NONE, but will fail if the input is not exactly 64 bytes (sha512 output) */ + REQUIRE_64_BYTES = 8, + UNRECOGNIZED = -1, +} +export const LengthOpSDKType = LengthOp; +export function lengthOpFromJSON(object: any): LengthOp { + switch (object) { + case 0: + case "NO_PREFIX": + return LengthOp.NO_PREFIX; + + case 1: + case "VAR_PROTO": + return LengthOp.VAR_PROTO; + + case 2: + case "VAR_RLP": + return LengthOp.VAR_RLP; + + case 3: + case "FIXED32_BIG": + return LengthOp.FIXED32_BIG; + + case 4: + case "FIXED32_LITTLE": + return LengthOp.FIXED32_LITTLE; + + case 5: + case "FIXED64_BIG": + return LengthOp.FIXED64_BIG; + + case 6: + case "FIXED64_LITTLE": + return LengthOp.FIXED64_LITTLE; + + case 7: + case "REQUIRE_32_BYTES": + return LengthOp.REQUIRE_32_BYTES; + + case 8: + case "REQUIRE_64_BYTES": + return LengthOp.REQUIRE_64_BYTES; + + case -1: + case "UNRECOGNIZED": + default: + return LengthOp.UNRECOGNIZED; + } +} +export function lengthOpToJSON(object: LengthOp): string { + switch (object) { + case LengthOp.NO_PREFIX: + return "NO_PREFIX"; + + case LengthOp.VAR_PROTO: + return "VAR_PROTO"; + + case LengthOp.VAR_RLP: + return "VAR_RLP"; + + case LengthOp.FIXED32_BIG: + return 
"FIXED32_BIG"; + + case LengthOp.FIXED32_LITTLE: + return "FIXED32_LITTLE"; + + case LengthOp.FIXED64_BIG: + return "FIXED64_BIG"; + + case LengthOp.FIXED64_LITTLE: + return "FIXED64_LITTLE"; + + case LengthOp.REQUIRE_32_BYTES: + return "REQUIRE_32_BYTES"; + + case LengthOp.REQUIRE_64_BYTES: + return "REQUIRE_64_BYTES"; + + case LengthOp.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * ExistenceProof takes a key and a value and a set of steps to perform on it. + * The result of peforming all these steps will provide a "root hash", which can + * be compared to the value in a header. + * + * Since it is computationally infeasible to produce a hash collission for any of the used + * cryptographic hash functions, if someone can provide a series of operations to transform + * a given key and value into a root hash that matches some trusted root, these key and values + * must be in the referenced merkle tree. + * + * The only possible issue is maliablity in LeafOp, such as providing extra prefix data, + * which should be controlled by a spec. Eg. with lengthOp as NONE, + * prefix = FOO, key = BAR, value = CHOICE + * and + * prefix = F, key = OOBAR, value = CHOICE + * would produce the same value. + * + * With LengthOp this is tricker but not impossible. Which is why the "leafPrefixEqual" field + * in the ProofSpec is valuable to prevent this mutability. And why all trees should + * length-prefix the data before hashing it. + */ + +export interface ExistenceProof { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOp; + path: InnerOp[]; +} +/** + * ExistenceProof takes a key and a value and a set of steps to perform on it. + * The result of peforming all these steps will provide a "root hash", which can + * be compared to the value in a header. + * + * Since it is computationally infeasible to produce a hash collission for any of the used + * cryptographic hash functions, if someone can provide a series of operations to transform + * a given key and value into a root hash that matches some trusted root, these key and values + * must be in the referenced merkle tree. + * + * The only possible issue is maliablity in LeafOp, such as providing extra prefix data, + * which should be controlled by a spec. Eg. with lengthOp as NONE, + * prefix = FOO, key = BAR, value = CHOICE + * and + * prefix = F, key = OOBAR, value = CHOICE + * would produce the same value. + * + * With LengthOp this is tricker but not impossible. Which is why the "leafPrefixEqual" field + * in the ProofSpec is valuable to prevent this mutability. And why all trees should + * length-prefix the data before hashing it. + */ + +export interface ExistenceProofSDKType { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOpSDKType; + path: InnerOpSDKType[]; +} +/** + * NonExistenceProof takes a proof of two neighbors, one left of the desired key, + * one right of the desired key. If both proofs are valid AND they are neighbors, + * then there is no valid proof for the given key. + */ + +export interface NonExistenceProof { + /** TODO: remove this as unnecessary??? we prove a range */ + key: Uint8Array; + left?: ExistenceProof; + right?: ExistenceProof; +} +/** + * NonExistenceProof takes a proof of two neighbors, one left of the desired key, + * one right of the desired key. If both proofs are valid AND they are neighbors, + * then there is no valid proof for the given key. 
+ */ + +export interface NonExistenceProofSDKType { + key: Uint8Array; + left?: ExistenceProofSDKType; + right?: ExistenceProofSDKType; +} +/** CommitmentProof is either an ExistenceProof or a NonExistenceProof, or a Batch of such messages */ + +export interface CommitmentProof { + exist?: ExistenceProof; + nonexist?: NonExistenceProof; + batch?: BatchProof; + compressed?: CompressedBatchProof; +} +/** CommitmentProof is either an ExistenceProof or a NonExistenceProof, or a Batch of such messages */ + +export interface CommitmentProofSDKType { + exist?: ExistenceProofSDKType; + nonexist?: NonExistenceProofSDKType; + batch?: BatchProofSDKType; + compressed?: CompressedBatchProofSDKType; +} +/** + * LeafOp represents the raw key-value data we wish to prove, and + * must be flexible to represent the internal transformation from + * the original key-value pairs into the basis hash, for many existing + * merkle trees. + * + * key and value are passed in. So that the signature of this operation is: + * leafOp(key, value) -> output + * + * To process this, first prehash the keys and values if needed (ANY means no hash in this case): + * hkey = prehashKey(key) + * hvalue = prehashValue(value) + * + * Then combine the bytes, and hash it + * output = hash(prefix || length(hkey) || hkey || length(hvalue) || hvalue) + */ + +export interface LeafOp { + hash: HashOp; + prehashKey: HashOp; + prehashValue: HashOp; + length: LengthOp; + /** + * prefix is a fixed bytes that may optionally be included at the beginning to differentiate + * a leaf node from an inner node. + */ + + prefix: Uint8Array; +} +/** + * LeafOp represents the raw key-value data we wish to prove, and + * must be flexible to represent the internal transformation from + * the original key-value pairs into the basis hash, for many existing + * merkle trees. + * + * key and value are passed in. So that the signature of this operation is: + * leafOp(key, value) -> output + * + * To process this, first prehash the keys and values if needed (ANY means no hash in this case): + * hkey = prehashKey(key) + * hvalue = prehashValue(value) + * + * Then combine the bytes, and hash it + * output = hash(prefix || length(hkey) || hkey || length(hvalue) || hvalue) + */ + +export interface LeafOpSDKType { + hash: HashOp; + prehash_key: HashOp; + prehash_value: HashOp; + length: LengthOp; + prefix: Uint8Array; +} +/** + * InnerOp represents a merkle-proof step that is not a leaf. + * It represents concatenating two children and hashing them to provide the next result. + * + * The result of the previous step is passed in, so the signature of this op is: + * innerOp(child) -> output + * + * The result of applying InnerOp should be: + * output = op.hash(op.prefix || child || op.suffix) + * + * where the || operator is concatenation of binary data, + * and child is the result of hashing all the tree below this step. + * + * Any special data, like prepending child with the length, or prepending the entire operation with + * some value to differentiate from leaf nodes, should be included in prefix and suffix. + * If either of prefix or suffix is empty, we just treat it as an empty string + */ + +export interface InnerOp { + hash: HashOp; + prefix: Uint8Array; + suffix: Uint8Array; +} +/** + * InnerOp represents a merkle-proof step that is not a leaf. + * It represents concatenating two children and hashing them to provide the next result. 
+ * + * The result of the previous step is passed in, so the signature of this op is: + * innerOp(child) -> output + * + * The result of applying InnerOp should be: + * output = op.hash(op.prefix || child || op.suffix) + * + * where the || operator is concatenation of binary data, + * and child is the result of hashing all the tree below this step. + * + * Any special data, like prepending child with the length, or prepending the entire operation with + * some value to differentiate from leaf nodes, should be included in prefix and suffix. + * If either of prefix or suffix is empty, we just treat it as an empty string + */ + +export interface InnerOpSDKType { + hash: HashOp; + prefix: Uint8Array; + suffix: Uint8Array; +} +/** + * ProofSpec defines what the expected parameters are for a given proof type. + * This can be stored in the client and used to validate any incoming proofs. + * + * verify(ProofSpec, Proof) -> Proof | Error + * + * As demonstrated in tests, if we don't fix the algorithm used to calculate the + * LeafHash for a given tree, there are many possible key-value pairs that can + * generate a given hash (by interpretting the preimage differently). + * We need this for proper security, requires client knows a priori what + * tree format server uses. But not in code, rather a configuration object. + */ + +export interface ProofSpec { + /** + * any field in the ExistenceProof must be the same as in this spec. + * except Prefix, which is just the first bytes of prefix (spec can be longer) + */ + leafSpec?: LeafOp; + innerSpec?: InnerSpec; + /** max_depth (if > 0) is the maximum number of InnerOps allowed (mainly for fixed-depth tries) */ + + maxDepth: number; + /** min_depth (if > 0) is the minimum number of InnerOps allowed (mainly for fixed-depth tries) */ + + minDepth: number; +} +/** + * ProofSpec defines what the expected parameters are for a given proof type. + * This can be stored in the client and used to validate any incoming proofs. + * + * verify(ProofSpec, Proof) -> Proof | Error + * + * As demonstrated in tests, if we don't fix the algorithm used to calculate the + * LeafHash for a given tree, there are many possible key-value pairs that can + * generate a given hash (by interpretting the preimage differently). + * We need this for proper security, requires client knows a priori what + * tree format server uses. But not in code, rather a configuration object. + */ + +export interface ProofSpecSDKType { + leaf_spec?: LeafOpSDKType; + inner_spec?: InnerSpecSDKType; + max_depth: number; + min_depth: number; +} +/** + * InnerSpec contains all store-specific structure info to determine if two proofs from a + * given store are neighbors. + * + * This enables: + * + * isLeftMost(spec: InnerSpec, op: InnerOp) + * isRightMost(spec: InnerSpec, op: InnerOp) + * isLeftNeighbor(spec: InnerSpec, left: InnerOp, right: InnerOp) + */ + +export interface InnerSpec { + /** + * Child order is the ordering of the children node, must count from 0 + * iavl tree is [0, 1] (left then right) + * merk is [0, 2, 1] (left, right, here) + */ + childOrder: number[]; + childSize: number; + minPrefixLength: number; + maxPrefixLength: number; + /** empty child is the prehash image that is used when one child is nil (eg. 20 bytes of 0) */ + + emptyChild: Uint8Array; + /** hash is the algorithm that must be used for each InnerOp */ + + hash: HashOp; +} +/** + * InnerSpec contains all store-specific structure info to determine if two proofs from a + * given store are neighbors. 
+ * + * This enables: + * + * isLeftMost(spec: InnerSpec, op: InnerOp) + * isRightMost(spec: InnerSpec, op: InnerOp) + * isLeftNeighbor(spec: InnerSpec, left: InnerOp, right: InnerOp) + */ + +export interface InnerSpecSDKType { + child_order: number[]; + child_size: number; + min_prefix_length: number; + max_prefix_length: number; + empty_child: Uint8Array; + hash: HashOp; +} +/** BatchProof is a group of multiple proof types than can be compressed */ + +export interface BatchProof { + entries: BatchEntry[]; +} +/** BatchProof is a group of multiple proof types than can be compressed */ + +export interface BatchProofSDKType { + entries: BatchEntrySDKType[]; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ + +export interface BatchEntry { + exist?: ExistenceProof; + nonexist?: NonExistenceProof; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ + +export interface BatchEntrySDKType { + exist?: ExistenceProofSDKType; + nonexist?: NonExistenceProofSDKType; +} +export interface CompressedBatchProof { + entries: CompressedBatchEntry[]; + lookupInners: InnerOp[]; +} +export interface CompressedBatchProofSDKType { + entries: CompressedBatchEntrySDKType[]; + lookup_inners: InnerOpSDKType[]; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ + +export interface CompressedBatchEntry { + exist?: CompressedExistenceProof; + nonexist?: CompressedNonExistenceProof; +} +/** Use BatchEntry not CommitmentProof, to avoid recursion */ + +export interface CompressedBatchEntrySDKType { + exist?: CompressedExistenceProofSDKType; + nonexist?: CompressedNonExistenceProofSDKType; +} +export interface CompressedExistenceProof { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOp; + /** these are indexes into the lookup_inners table in CompressedBatchProof */ + + path: number[]; +} +export interface CompressedExistenceProofSDKType { + key: Uint8Array; + value: Uint8Array; + leaf?: LeafOpSDKType; + path: number[]; +} +export interface CompressedNonExistenceProof { + /** TODO: remove this as unnecessary??? we prove a range */ + key: Uint8Array; + left?: CompressedExistenceProof; + right?: CompressedExistenceProof; +} +export interface CompressedNonExistenceProofSDKType { + key: Uint8Array; + left?: CompressedExistenceProofSDKType; + right?: CompressedExistenceProofSDKType; +} + +function createBaseExistenceProof(): ExistenceProof { + return { + key: new Uint8Array(), + value: new Uint8Array(), + leaf: undefined, + path: [] + }; +} + +export const ExistenceProof = { + encode(message: ExistenceProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + if (message.leaf !== undefined) { + LeafOp.encode(message.leaf, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.path) { + InnerOp.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExistenceProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExistenceProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + case 3: + message.leaf = LeafOp.decode(reader, reader.uint32()); + break; + + case 4: + message.path.push(InnerOp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ExistenceProof { + const message = createBaseExistenceProof(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.leaf = object.leaf !== undefined && object.leaf !== null ? LeafOp.fromPartial(object.leaf) : undefined; + message.path = object.path?.map(e => InnerOp.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseNonExistenceProof(): NonExistenceProof { + return { + key: new Uint8Array(), + left: undefined, + right: undefined + }; +} + +export const NonExistenceProof = { + encode(message: NonExistenceProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.left !== undefined) { + ExistenceProof.encode(message.left, writer.uint32(18).fork()).ldelim(); + } + + if (message.right !== undefined) { + ExistenceProof.encode(message.right, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NonExistenceProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNonExistenceProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.left = ExistenceProof.decode(reader, reader.uint32()); + break; + + case 3: + message.right = ExistenceProof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NonExistenceProof { + const message = createBaseNonExistenceProof(); + message.key = object.key ?? new Uint8Array(); + message.left = object.left !== undefined && object.left !== null ? ExistenceProof.fromPartial(object.left) : undefined; + message.right = object.right !== undefined && object.right !== null ? 
ExistenceProof.fromPartial(object.right) : undefined; + return message; + } + +}; + +function createBaseCommitmentProof(): CommitmentProof { + return { + exist: undefined, + nonexist: undefined, + batch: undefined, + compressed: undefined + }; +} + +export const CommitmentProof = { + encode(message: CommitmentProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.exist !== undefined) { + ExistenceProof.encode(message.exist, writer.uint32(10).fork()).ldelim(); + } + + if (message.nonexist !== undefined) { + NonExistenceProof.encode(message.nonexist, writer.uint32(18).fork()).ldelim(); + } + + if (message.batch !== undefined) { + BatchProof.encode(message.batch, writer.uint32(26).fork()).ldelim(); + } + + if (message.compressed !== undefined) { + CompressedBatchProof.encode(message.compressed, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitmentProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitmentProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.exist = ExistenceProof.decode(reader, reader.uint32()); + break; + + case 2: + message.nonexist = NonExistenceProof.decode(reader, reader.uint32()); + break; + + case 3: + message.batch = BatchProof.decode(reader, reader.uint32()); + break; + + case 4: + message.compressed = CompressedBatchProof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommitmentProof { + const message = createBaseCommitmentProof(); + message.exist = object.exist !== undefined && object.exist !== null ? ExistenceProof.fromPartial(object.exist) : undefined; + message.nonexist = object.nonexist !== undefined && object.nonexist !== null ? NonExistenceProof.fromPartial(object.nonexist) : undefined; + message.batch = object.batch !== undefined && object.batch !== null ? BatchProof.fromPartial(object.batch) : undefined; + message.compressed = object.compressed !== undefined && object.compressed !== null ? CompressedBatchProof.fromPartial(object.compressed) : undefined; + return message; + } + +}; + +function createBaseLeafOp(): LeafOp { + return { + hash: 0, + prehashKey: 0, + prehashValue: 0, + length: 0, + prefix: new Uint8Array() + }; +} + +export const LeafOp = { + encode(message: LeafOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash !== 0) { + writer.uint32(8).int32(message.hash); + } + + if (message.prehashKey !== 0) { + writer.uint32(16).int32(message.prehashKey); + } + + if (message.prehashValue !== 0) { + writer.uint32(24).int32(message.prehashValue); + } + + if (message.length !== 0) { + writer.uint32(32).int32(message.length); + } + + if (message.prefix.length !== 0) { + writer.uint32(42).bytes(message.prefix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LeafOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLeafOp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = (reader.int32() as any); + break; + + case 2: + message.prehashKey = (reader.int32() as any); + break; + + case 3: + message.prehashValue = (reader.int32() as any); + break; + + case 4: + message.length = (reader.int32() as any); + break; + + case 5: + message.prefix = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LeafOp { + const message = createBaseLeafOp(); + message.hash = object.hash ?? 0; + message.prehashKey = object.prehashKey ?? 0; + message.prehashValue = object.prehashValue ?? 0; + message.length = object.length ?? 0; + message.prefix = object.prefix ?? new Uint8Array(); + return message; + } + +}; + +function createBaseInnerOp(): InnerOp { + return { + hash: 0, + prefix: new Uint8Array(), + suffix: new Uint8Array() + }; +} + +export const InnerOp = { + encode(message: InnerOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash !== 0) { + writer.uint32(8).int32(message.hash); + } + + if (message.prefix.length !== 0) { + writer.uint32(18).bytes(message.prefix); + } + + if (message.suffix.length !== 0) { + writer.uint32(26).bytes(message.suffix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InnerOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInnerOp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = (reader.int32() as any); + break; + + case 2: + message.prefix = reader.bytes(); + break; + + case 3: + message.suffix = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InnerOp { + const message = createBaseInnerOp(); + message.hash = object.hash ?? 0; + message.prefix = object.prefix ?? new Uint8Array(); + message.suffix = object.suffix ?? new Uint8Array(); + return message; + } + +}; + +function createBaseProofSpec(): ProofSpec { + return { + leafSpec: undefined, + innerSpec: undefined, + maxDepth: 0, + minDepth: 0 + }; +} + +export const ProofSpec = { + encode(message: ProofSpec, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.leafSpec !== undefined) { + LeafOp.encode(message.leafSpec, writer.uint32(10).fork()).ldelim(); + } + + if (message.innerSpec !== undefined) { + InnerSpec.encode(message.innerSpec, writer.uint32(18).fork()).ldelim(); + } + + if (message.maxDepth !== 0) { + writer.uint32(24).int32(message.maxDepth); + } + + if (message.minDepth !== 0) { + writer.uint32(32).int32(message.minDepth); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofSpec { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProofSpec(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.leafSpec = LeafOp.decode(reader, reader.uint32()); + break; + + case 2: + message.innerSpec = InnerSpec.decode(reader, reader.uint32()); + break; + + case 3: + message.maxDepth = reader.int32(); + break; + + case 4: + message.minDepth = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ProofSpec { + const message = createBaseProofSpec(); + message.leafSpec = object.leafSpec !== undefined && object.leafSpec !== null ? LeafOp.fromPartial(object.leafSpec) : undefined; + message.innerSpec = object.innerSpec !== undefined && object.innerSpec !== null ? InnerSpec.fromPartial(object.innerSpec) : undefined; + message.maxDepth = object.maxDepth ?? 0; + message.minDepth = object.minDepth ?? 0; + return message; + } + +}; + +function createBaseInnerSpec(): InnerSpec { + return { + childOrder: [], + childSize: 0, + minPrefixLength: 0, + maxPrefixLength: 0, + emptyChild: new Uint8Array(), + hash: 0 + }; +} + +export const InnerSpec = { + encode(message: InnerSpec, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.childOrder) { + writer.int32(v); + } + + writer.ldelim(); + + if (message.childSize !== 0) { + writer.uint32(16).int32(message.childSize); + } + + if (message.minPrefixLength !== 0) { + writer.uint32(24).int32(message.minPrefixLength); + } + + if (message.maxPrefixLength !== 0) { + writer.uint32(32).int32(message.maxPrefixLength); + } + + if (message.emptyChild.length !== 0) { + writer.uint32(42).bytes(message.emptyChild); + } + + if (message.hash !== 0) { + writer.uint32(48).int32(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InnerSpec { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInnerSpec(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.childOrder.push(reader.int32()); + } + } else { + message.childOrder.push(reader.int32()); + } + + break; + + case 2: + message.childSize = reader.int32(); + break; + + case 3: + message.minPrefixLength = reader.int32(); + break; + + case 4: + message.maxPrefixLength = reader.int32(); + break; + + case 5: + message.emptyChild = reader.bytes(); + break; + + case 6: + message.hash = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InnerSpec { + const message = createBaseInnerSpec(); + message.childOrder = object.childOrder?.map(e => e) || []; + message.childSize = object.childSize ?? 0; + message.minPrefixLength = object.minPrefixLength ?? 0; + message.maxPrefixLength = object.maxPrefixLength ?? 0; + message.emptyChild = object.emptyChild ?? new Uint8Array(); + message.hash = object.hash ?? 
0; + return message; + } + +}; + +function createBaseBatchProof(): BatchProof { + return { + entries: [] + }; +} + +export const BatchProof = { + encode(message: BatchProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.entries) { + BatchEntry.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BatchProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBatchProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.entries.push(BatchEntry.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BatchProof { + const message = createBaseBatchProof(); + message.entries = object.entries?.map(e => BatchEntry.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseBatchEntry(): BatchEntry { + return { + exist: undefined, + nonexist: undefined + }; +} + +export const BatchEntry = { + encode(message: BatchEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.exist !== undefined) { + ExistenceProof.encode(message.exist, writer.uint32(10).fork()).ldelim(); + } + + if (message.nonexist !== undefined) { + NonExistenceProof.encode(message.nonexist, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BatchEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBatchEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.exist = ExistenceProof.decode(reader, reader.uint32()); + break; + + case 2: + message.nonexist = NonExistenceProof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BatchEntry { + const message = createBaseBatchEntry(); + message.exist = object.exist !== undefined && object.exist !== null ? ExistenceProof.fromPartial(object.exist) : undefined; + message.nonexist = object.nonexist !== undefined && object.nonexist !== null ? NonExistenceProof.fromPartial(object.nonexist) : undefined; + return message; + } + +}; + +function createBaseCompressedBatchProof(): CompressedBatchProof { + return { + entries: [], + lookupInners: [] + }; +} + +export const CompressedBatchProof = { + encode(message: CompressedBatchProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.entries) { + CompressedBatchEntry.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.lookupInners) { + InnerOp.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedBatchProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCompressedBatchProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.entries.push(CompressedBatchEntry.decode(reader, reader.uint32())); + break; + + case 2: + message.lookupInners.push(InnerOp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CompressedBatchProof { + const message = createBaseCompressedBatchProof(); + message.entries = object.entries?.map(e => CompressedBatchEntry.fromPartial(e)) || []; + message.lookupInners = object.lookupInners?.map(e => InnerOp.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCompressedBatchEntry(): CompressedBatchEntry { + return { + exist: undefined, + nonexist: undefined + }; +} + +export const CompressedBatchEntry = { + encode(message: CompressedBatchEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.exist !== undefined) { + CompressedExistenceProof.encode(message.exist, writer.uint32(10).fork()).ldelim(); + } + + if (message.nonexist !== undefined) { + CompressedNonExistenceProof.encode(message.nonexist, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedBatchEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCompressedBatchEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.exist = CompressedExistenceProof.decode(reader, reader.uint32()); + break; + + case 2: + message.nonexist = CompressedNonExistenceProof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CompressedBatchEntry { + const message = createBaseCompressedBatchEntry(); + message.exist = object.exist !== undefined && object.exist !== null ? CompressedExistenceProof.fromPartial(object.exist) : undefined; + message.nonexist = object.nonexist !== undefined && object.nonexist !== null ? CompressedNonExistenceProof.fromPartial(object.nonexist) : undefined; + return message; + } + +}; + +function createBaseCompressedExistenceProof(): CompressedExistenceProof { + return { + key: new Uint8Array(), + value: new Uint8Array(), + leaf: undefined, + path: [] + }; +} + +export const CompressedExistenceProof = { + encode(message: CompressedExistenceProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + if (message.leaf !== undefined) { + LeafOp.encode(message.leaf, writer.uint32(26).fork()).ldelim(); + } + + writer.uint32(34).fork(); + + for (const v of message.path) { + writer.int32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedExistenceProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCompressedExistenceProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + case 3: + message.leaf = LeafOp.decode(reader, reader.uint32()); + break; + + case 4: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CompressedExistenceProof { + const message = createBaseCompressedExistenceProof(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.leaf = object.leaf !== undefined && object.leaf !== null ? LeafOp.fromPartial(object.leaf) : undefined; + message.path = object.path?.map(e => e) || []; + return message; + } + +}; + +function createBaseCompressedNonExistenceProof(): CompressedNonExistenceProof { + return { + key: new Uint8Array(), + left: undefined, + right: undefined + }; +} + +export const CompressedNonExistenceProof = { + encode(message: CompressedNonExistenceProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.left !== undefined) { + CompressedExistenceProof.encode(message.left, writer.uint32(18).fork()).ldelim(); + } + + if (message.right !== undefined) { + CompressedExistenceProof.encode(message.right, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompressedNonExistenceProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCompressedNonExistenceProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.left = CompressedExistenceProof.decode(reader, reader.uint32()); + break; + + case 3: + message.right = CompressedExistenceProof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CompressedNonExistenceProof { + const message = createBaseCompressedNonExistenceProof(); + message.key = object.key ?? new Uint8Array(); + message.left = object.left !== undefined && object.left !== null ? CompressedExistenceProof.fromPartial(object.left) : undefined; + message.right = object.right !== undefined && object.right !== null ? CompressedExistenceProof.fromPartial(object.right) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/app/v1alpha1/config.ts b/packages/codegen/src/cosmos/app/v1alpha1/config.ts new file mode 100644 index 00000000..90c25282 --- /dev/null +++ b/packages/codegen/src/cosmos/app/v1alpha1/config.ts @@ -0,0 +1,159 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * Config represents the configuration for a Cosmos SDK ABCI app. 
+ * It is intended that all state machine logic including the version of + * baseapp and tx handlers (and possibly even Tendermint) that an app needs + * can be described in a config object. For compatibility, the framework should + * allow a mixture of declarative and imperative app wiring, however, apps + * that strive for the maximum ease of maintainability should be able to describe + * their state machine with a config object alone. + */ + +export interface Config { + /** modules are the module configurations for the app. */ + modules: ModuleConfig[]; +} +/** + * Config represents the configuration for a Cosmos SDK ABCI app. + * It is intended that all state machine logic including the version of + * baseapp and tx handlers (and possibly even Tendermint) that an app needs + * can be described in a config object. For compatibility, the framework should + * allow a mixture of declarative and imperative app wiring, however, apps + * that strive for the maximum ease of maintainability should be able to describe + * their state machine with a config object alone. + */ + +export interface ConfigSDKType { + modules: ModuleConfigSDKType[]; +} +/** ModuleConfig is a module configuration for an app. */ + +export interface ModuleConfig { + /** + * name is the unique name of the module within the app. It should be a name + * that persists between different versions of a module so that modules + * can be smoothly upgraded to new versions. + * + * For example, for the module cosmos.bank.module.v1.Module, we may chose + * to simply name the module "bank" in the app. When we upgrade to + * cosmos.bank.module.v2.Module, the app-specific name "bank" stays the same + * and the framework knows that the v2 module should receive all the same state + * that the v1 module had. Note: modules should provide info on which versions + * they can migrate from in the ModuleDescriptor.can_migration_from field. + */ + name: string; + /** + * config is the config object for the module. Module config messages should + * define a ModuleDescriptor using the cosmos.app.v1alpha1.is_module extension. + */ + + config?: Any; +} +/** ModuleConfig is a module configuration for an app. */ + +export interface ModuleConfigSDKType { + name: string; + config?: AnySDKType; +} + +function createBaseConfig(): Config { + return { + modules: [] + }; +} + +export const Config = { + encode(message: Config, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.modules) { + ModuleConfig.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Config { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.modules.push(ModuleConfig.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Config { + const message = createBaseConfig(); + message.modules = object.modules?.map(e => ModuleConfig.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseModuleConfig(): ModuleConfig { + return { + name: "", + config: undefined + }; +} + +export const ModuleConfig = { + encode(message: ModuleConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.config !== undefined) { + Any.encode(message.config, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleConfig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.config = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleConfig { + const message = createBaseModuleConfig(); + message.name = object.name ?? ""; + message.config = object.config !== undefined && object.config !== null ? Any.fromPartial(object.config) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/app/v1alpha1/module.ts b/packages/codegen/src/cosmos/app/v1alpha1/module.ts new file mode 100644 index 00000000..524b35f0 --- /dev/null +++ b/packages/codegen/src/cosmos/app/v1alpha1/module.ts @@ -0,0 +1,276 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** ModuleDescriptor describes an app module. */ + +export interface ModuleDescriptor { + /** + * go_import names the package that should be imported by an app to load the + * module in the runtime module registry. Either go_import must be defined here + * or the go_package option must be defined at the file level to indicate + * to users where to location the module implementation. go_import takes + * precedence over go_package when both are defined. + */ + goImport: string; + /** + * use_package refers to a protobuf package that this module + * uses and exposes to the world. In an app, only one module should "use" + * or own a single protobuf package. It is assumed that the module uses + * all of the .proto files in a single package. + */ + + usePackage: PackageReference[]; + /** + * can_migrate_from defines which module versions this module can migrate + * state from. The framework will check that one module version is able to + * migrate from a previous module version before attempting to update its + * config. It is assumed that modules can transitively migrate from earlier + * versions. For instance if v3 declares it can migrate from v2, and v2 + * declares it can migrate from v1, the framework knows how to migrate + * from v1 to v3, assuming all 3 module versions are registered at runtime. 
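+ *
+ * A minimal sketch of declaring migrations with the generated fromPartial helper
+ * (the goImport path and module names are illustrative, not authoritative):
+ *
+ * @example
+ * const descriptor = ModuleDescriptor.fromPartial({
+ *   goImport: "github.com/cosmos/cosmos-sdk/x/bank",
+ *   canMigrateFrom: [{ module: "cosmos.bank.module.v1.Module" }]
+ * });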
+ */ + + canMigrateFrom: MigrateFromInfo[]; +} +/** ModuleDescriptor describes an app module. */ + +export interface ModuleDescriptorSDKType { + go_import: string; + use_package: PackageReferenceSDKType[]; + can_migrate_from: MigrateFromInfoSDKType[]; +} +/** PackageReference is a reference to a protobuf package used by a module. */ + +export interface PackageReference { + /** name is the fully-qualified name of the package. */ + name: string; + /** + * revision is the optional revision of the package that is being used. + * Protobuf packages used in Cosmos should generally have a major version + * as the last part of the package name, ex. foo.bar.baz.v1. + * The revision of a package can be thought of as the minor version of a + * package which has additional backwards compatible definitions that weren't + * present in a previous version. + * + * A package should indicate its revision with a source code comment + * above the package declaration in one of its fields containing the + * test "Revision N" where N is an integer revision. All packages start + * at revision 0 the first time they are released in a module. + * + * When a new version of a module is released and items are added to existing + * .proto files, these definitions should contain comments of the form + * "Since Revision N" where N is an integer revision. + * + * When the module runtime starts up, it will check the pinned proto + * image and panic if there are runtime protobuf definitions that are not + * in the pinned descriptor which do not have + * a "Since Revision N" comment or have a "Since Revision N" comment where + * N is <= to the revision specified here. This indicates that the protobuf + * files have been updated, but the pinned file descriptor hasn't. + * + * If there are items in the pinned file descriptor with a revision + * greater than the value indicated here, this will also cause a panic + * as it may mean that the pinned descriptor for a legacy module has been + * improperly updated or that there is some other versioning discrepancy. + * Runtime protobuf definitions will also be checked for compatibility + * with pinned file descriptors to make sure there are no incompatible changes. + * + * This behavior ensures that: + * * pinned proto images are up-to-date + * * protobuf files are carefully annotated with revision comments which + * are important good client UX + * * protobuf files are changed in backwards and forwards compatible ways + */ + + revision: number; +} +/** PackageReference is a reference to a protobuf package used by a module. */ + +export interface PackageReferenceSDKType { + name: string; + revision: number; +} +/** + * MigrateFromInfo is information on a module version that a newer module + * can migrate from. + */ + +export interface MigrateFromInfo { + /** + * module is the fully-qualified protobuf name of the module config object + * for the previous module version, ex: "cosmos.group.module.v1.Module". + */ + module: string; +} +/** + * MigrateFromInfo is information on a module version that a newer module + * can migrate from. 
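+ *
+ * A minimal encode/decode round-trip sketch using the generated codec (the module
+ * name follows the example given above and is illustrative):
+ *
+ * @example
+ * const info = MigrateFromInfo.fromPartial({ module: "cosmos.group.module.v1.Module" });
+ * const bytes = MigrateFromInfo.encode(info).finish();
+ * const decoded = MigrateFromInfo.decode(bytes);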
+ */ + +export interface MigrateFromInfoSDKType { + module: string; +} + +function createBaseModuleDescriptor(): ModuleDescriptor { + return { + goImport: "", + usePackage: [], + canMigrateFrom: [] + }; +} + +export const ModuleDescriptor = { + encode(message: ModuleDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.goImport !== "") { + writer.uint32(10).string(message.goImport); + } + + for (const v of message.usePackage) { + PackageReference.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.canMigrateFrom) { + MigrateFromInfo.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.goImport = reader.string(); + break; + + case 2: + message.usePackage.push(PackageReference.decode(reader, reader.uint32())); + break; + + case 3: + message.canMigrateFrom.push(MigrateFromInfo.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleDescriptor { + const message = createBaseModuleDescriptor(); + message.goImport = object.goImport ?? ""; + message.usePackage = object.usePackage?.map(e => PackageReference.fromPartial(e)) || []; + message.canMigrateFrom = object.canMigrateFrom?.map(e => MigrateFromInfo.fromPartial(e)) || []; + return message; + } + +}; + +function createBasePackageReference(): PackageReference { + return { + name: "", + revision: 0 + }; +} + +export const PackageReference = { + encode(message: PackageReference, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.revision !== 0) { + writer.uint32(16).uint32(message.revision); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PackageReference { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePackageReference(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.revision = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PackageReference { + const message = createBasePackageReference(); + message.name = object.name ?? ""; + message.revision = object.revision ?? 0; + return message; + } + +}; + +function createBaseMigrateFromInfo(): MigrateFromInfo { + return { + module: "" + }; +} + +export const MigrateFromInfo = { + encode(message: MigrateFromInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.module !== "") { + writer.uint32(10).string(message.module); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MigrateFromInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMigrateFromInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.module = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MigrateFromInfo { + const message = createBaseMigrateFromInfo(); + message.module = object.module ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/app/v1alpha1/query.rpc.Query.ts b/packages/codegen/src/cosmos/app/v1alpha1/query.rpc.Query.ts new file mode 100644 index 00000000..9ae7ca7f --- /dev/null +++ b/packages/codegen/src/cosmos/app/v1alpha1/query.rpc.Query.ts @@ -0,0 +1,35 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryConfigRequest, QueryConfigResponse } from "./query"; +/** Query is the app module query service. */ + +export interface Query { + /** Config returns the current app config. */ + config(request?: QueryConfigRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.config = this.config.bind(this); + } + + config(request: QueryConfigRequest = {}): Promise { + const data = QueryConfigRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.app.v1alpha1.Query", "Config", data); + return promise.then(data => QueryConfigResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + config(request?: QueryConfigRequest): Promise { + return queryService.config(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/app/v1alpha1/query.ts b/packages/codegen/src/cosmos/app/v1alpha1/query.ts new file mode 100644 index 00000000..f6f7509a --- /dev/null +++ b/packages/codegen/src/cosmos/app/v1alpha1/query.ts @@ -0,0 +1,99 @@ +import { Config, ConfigSDKType } from "./config"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryConfigRequest is the Query/Config request type. */ + +export interface QueryConfigRequest {} +/** QueryConfigRequest is the Query/Config request type. */ + +export interface QueryConfigRequestSDKType {} +/** QueryConfigRequest is the Query/Config response type. */ + +export interface QueryConfigResponse { + /** config is the current app config. */ + config?: Config; +} +/** QueryConfigRequest is the Query/Config response type. */ + +export interface QueryConfigResponseSDKType { + config?: ConfigSDKType; +} + +function createBaseQueryConfigRequest(): QueryConfigRequest { + return {}; +} + +export const QueryConfigRequest = { + encode(_: QueryConfigRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConfigRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryConfigRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryConfigRequest { + const message = createBaseQueryConfigRequest(); + return message; + } + +}; + +function createBaseQueryConfigResponse(): QueryConfigResponse { + return { + config: undefined + }; +} + +export const QueryConfigResponse = { + encode(message: QueryConfigResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.config !== undefined) { + Config.encode(message.config, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConfigResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConfigResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.config = Config.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConfigResponse { + const message = createBaseQueryConfigResponse(); + message.config = object.config !== undefined && object.config !== null ? Config.fromPartial(object.config) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/auth/v1beta1/auth.ts b/packages/codegen/src/cosmos/auth/v1beta1/auth.ts new file mode 100644 index 00000000..2342a965 --- /dev/null +++ b/packages/codegen/src/cosmos/auth/v1beta1/auth.ts @@ -0,0 +1,284 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ + +export interface BaseAccount { + address: string; + pubKey?: Any; + accountNumber: Long; + sequence: Long; +} +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ + +export interface BaseAccountSDKType { + address: string; + pub_key?: AnySDKType; + account_number: Long; + sequence: Long; +} +/** ModuleAccount defines an account for modules that holds coins on a pool. */ + +export interface ModuleAccount { + baseAccount?: BaseAccount; + name: string; + permissions: string[]; +} +/** ModuleAccount defines an account for modules that holds coins on a pool. */ + +export interface ModuleAccountSDKType { + base_account?: BaseAccountSDKType; + name: string; + permissions: string[]; +} +/** Params defines the parameters for the auth module. */ + +export interface Params { + maxMemoCharacters: Long; + txSigLimit: Long; + txSizeCostPerByte: Long; + sigVerifyCostEd25519: Long; + sigVerifyCostSecp256k1: Long; +} +/** Params defines the parameters for the auth module. 
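+ *
+ * A minimal construction sketch using the generated Params.fromPartial helper and
+ * the re-exported Long type (the values are illustrative only):
+ *
+ * @example
+ * const params = Params.fromPartial({
+ *   maxMemoCharacters: Long.fromValue(256),
+ *   txSigLimit: Long.fromValue(7)
+ * });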
*/ + +export interface ParamsSDKType { + max_memo_characters: Long; + tx_sig_limit: Long; + tx_size_cost_per_byte: Long; + sig_verify_cost_ed25519: Long; + sig_verify_cost_secp256k1: Long; +} + +function createBaseBaseAccount(): BaseAccount { + return { + address: "", + pubKey: undefined, + accountNumber: Long.UZERO, + sequence: Long.UZERO + }; +} + +export const BaseAccount = { + encode(message: BaseAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + + if (!message.accountNumber.isZero()) { + writer.uint32(24).uint64(message.accountNumber); + } + + if (!message.sequence.isZero()) { + writer.uint32(32).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBaseAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.accountNumber = (reader.uint64() as Long); + break; + + case 4: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BaseAccount { + const message = createBaseBaseAccount(); + message.address = object.address ?? ""; + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? Any.fromPartial(object.pubKey) : undefined; + message.accountNumber = object.accountNumber !== undefined && object.accountNumber !== null ? Long.fromValue(object.accountNumber) : Long.UZERO; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseModuleAccount(): ModuleAccount { + return { + baseAccount: undefined, + name: "", + permissions: [] + }; +} + +export const ModuleAccount = { + encode(message: ModuleAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + + for (const v of message.permissions) { + writer.uint32(26).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + + case 2: + message.name = reader.string(); + break; + + case 3: + message.permissions.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleAccount { + const message = createBaseModuleAccount(); + message.baseAccount = object.baseAccount !== undefined && object.baseAccount !== null ? 
BaseAccount.fromPartial(object.baseAccount) : undefined; + message.name = object.name ?? ""; + message.permissions = object.permissions?.map(e => e) || []; + return message; + } + +}; + +function createBaseParams(): Params { + return { + maxMemoCharacters: Long.UZERO, + txSigLimit: Long.UZERO, + txSizeCostPerByte: Long.UZERO, + sigVerifyCostEd25519: Long.UZERO, + sigVerifyCostSecp256k1: Long.UZERO + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.maxMemoCharacters.isZero()) { + writer.uint32(8).uint64(message.maxMemoCharacters); + } + + if (!message.txSigLimit.isZero()) { + writer.uint32(16).uint64(message.txSigLimit); + } + + if (!message.txSizeCostPerByte.isZero()) { + writer.uint32(24).uint64(message.txSizeCostPerByte); + } + + if (!message.sigVerifyCostEd25519.isZero()) { + writer.uint32(32).uint64(message.sigVerifyCostEd25519); + } + + if (!message.sigVerifyCostSecp256k1.isZero()) { + writer.uint32(40).uint64(message.sigVerifyCostSecp256k1); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxMemoCharacters = (reader.uint64() as Long); + break; + + case 2: + message.txSigLimit = (reader.uint64() as Long); + break; + + case 3: + message.txSizeCostPerByte = (reader.uint64() as Long); + break; + + case 4: + message.sigVerifyCostEd25519 = (reader.uint64() as Long); + break; + + case 5: + message.sigVerifyCostSecp256k1 = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.maxMemoCharacters = object.maxMemoCharacters !== undefined && object.maxMemoCharacters !== null ? Long.fromValue(object.maxMemoCharacters) : Long.UZERO; + message.txSigLimit = object.txSigLimit !== undefined && object.txSigLimit !== null ? Long.fromValue(object.txSigLimit) : Long.UZERO; + message.txSizeCostPerByte = object.txSizeCostPerByte !== undefined && object.txSizeCostPerByte !== null ? Long.fromValue(object.txSizeCostPerByte) : Long.UZERO; + message.sigVerifyCostEd25519 = object.sigVerifyCostEd25519 !== undefined && object.sigVerifyCostEd25519 !== null ? Long.fromValue(object.sigVerifyCostEd25519) : Long.UZERO; + message.sigVerifyCostSecp256k1 = object.sigVerifyCostSecp256k1 !== undefined && object.sigVerifyCostSecp256k1 !== null ? Long.fromValue(object.sigVerifyCostSecp256k1) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/auth/v1beta1/genesis.ts b/packages/codegen/src/cosmos/auth/v1beta1/genesis.ts new file mode 100644 index 00000000..ac545792 --- /dev/null +++ b/packages/codegen/src/cosmos/auth/v1beta1/genesis.ts @@ -0,0 +1,74 @@ +import { Params, ParamsSDKType } from "./auth"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the auth module's genesis state. */ + +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** accounts are the accounts present at genesis. 
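+ *
+ * A minimal sketch of assembling and serializing a genesis state with the
+ * generated helpers (an empty accounts list is used purely for illustration):
+ *
+ * @example
+ * const genesis = GenesisState.fromPartial({ params: Params.fromPartial({}), accounts: [] });
+ * const bytes = GenesisState.encode(genesis).finish();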
*/ + + accounts: Any[]; +} +/** GenesisState defines the auth module's genesis state. */ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + accounts: AnySDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + accounts: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.accounts = object.accounts?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/auth/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/auth/v1beta1/query.lcd.ts new file mode 100644 index 00000000..83fdf31f --- /dev/null +++ b/packages/codegen/src/cosmos/auth/v1beta1/query.lcd.ts @@ -0,0 +1,83 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryAccountsRequest, QueryAccountsResponseSDKType, QueryAccountRequest, QueryAccountResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryModuleAccountsRequest, QueryModuleAccountsResponseSDKType, Bech32PrefixRequest, Bech32PrefixResponseSDKType, AddressBytesToStringRequest, AddressBytesToStringResponseSDKType, AddressStringToBytesRequest, AddressStringToBytesResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.accounts = this.accounts.bind(this); + this.account = this.account.bind(this); + this.params = this.params.bind(this); + this.moduleAccounts = this.moduleAccounts.bind(this); + this.bech32Prefix = this.bech32Prefix.bind(this); + this.addressBytesToString = this.addressBytesToString.bind(this); + this.addressStringToBytes = this.addressStringToBytes.bind(this); + } + /* Accounts returns all the existing accounts + + Since: cosmos-sdk 0.43 */ + + + async accounts(params: QueryAccountsRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/auth/v1beta1/accounts`; + return await this.req.get(endpoint, options); + } + /* Account returns account details based on address. 
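For context, a hypothetical way this LCD client could be wired up (the REST endpoint is a placeholder and the LCDClient constructor shape is an assumption about @osmonauts/lcd):

  import { LCDClient } from "@osmonauts/lcd";
  import { LCDQueryClient } from "./query.lcd";

  async function queryAuthViaLcd() {
    // Assumed constructor shape: { restEndpoint: string }.
    const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
    const client = new LCDQueryClient({ requestClient });

    // Paginated listing of all accounts, then a single-account lookup.
    const all = await client.accounts({ pagination: undefined });
    const one = await client.account({ address: "cosmos1exampleaddress" }); // placeholder address
    return { all, one };
  }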
*/ + + + async account(params: QueryAccountRequest): Promise { + const endpoint = `cosmos/auth/v1beta1/accounts/${params.address}`; + return await this.req.get(endpoint); + } + /* Params queries all parameters. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/auth/v1beta1/params`; + return await this.req.get(endpoint); + } + /* ModuleAccounts returns all the existing module accounts. */ + + + async moduleAccounts(_params: QueryModuleAccountsRequest = {}): Promise { + const endpoint = `cosmos/auth/v1beta1/module_accounts`; + return await this.req.get(endpoint); + } + /* Bech32 queries bech32Prefix */ + + + async bech32Prefix(_params: Bech32PrefixRequest = {}): Promise { + const endpoint = `cosmos/auth/v1beta1/bech32`; + return await this.req.get(endpoint); + } + /* AddressBytesToString converts Account Address bytes to string */ + + + async addressBytesToString(params: AddressBytesToStringRequest): Promise { + const endpoint = `cosmos/auth/v1beta1/bech32/${params.addressBytes}`; + return await this.req.get(endpoint); + } + /* AddressStringToBytes converts Address string to bytes */ + + + async addressStringToBytes(params: AddressStringToBytesRequest): Promise { + const endpoint = `cosmos/auth/v1beta1/bech32/${params.addressString}`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/auth/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/auth/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..bd0c78f3 --- /dev/null +++ b/packages/codegen/src/cosmos/auth/v1beta1/query.rpc.Query.ts @@ -0,0 +1,125 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryAccountsRequest, QueryAccountsResponse, QueryAccountRequest, QueryAccountResponse, QueryParamsRequest, QueryParamsResponse, QueryModuleAccountsRequest, QueryModuleAccountsResponse, Bech32PrefixRequest, Bech32PrefixResponse, AddressBytesToStringRequest, AddressBytesToStringResponse, AddressStringToBytesRequest, AddressStringToBytesResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** + * Accounts returns all the existing accounts + * + * Since: cosmos-sdk 0.43 + */ + accounts(request?: QueryAccountsRequest): Promise; + /** Account returns account details based on address. */ + + account(request: QueryAccountRequest): Promise; + /** Params queries all parameters. */ + + params(request?: QueryParamsRequest): Promise; + /** ModuleAccounts returns all the existing module accounts. 
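A sketch of how the Stargate extension created further down in this file is typically consumed (the RPC endpoint is a placeholder; Tendermint34Client from @cosmjs/tendermint-rpc is assumed to be available alongside @cosmjs/stargate):

  import { QueryClient } from "@cosmjs/stargate";
  import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
  import { createRpcQueryExtension } from "./query.rpc.Query";

  async function queryAuthViaRpc() {
    // Connect a Tendermint RPC client and layer the protobuf query service on top.
    const tmClient = await Tendermint34Client.connect("https://rpc.example.com"); // placeholder endpoint
    const base = new QueryClient(tmClient);
    const auth = createRpcQueryExtension(base);

    const paramsRes = await auth.params();                                        // QueryParamsResponse
    const accountRes = await auth.account({ address: "cosmos1exampleaddress" });  // placeholder address
    return { paramsRes, accountRes };
  }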
*/ + + moduleAccounts(request?: QueryModuleAccountsRequest): Promise; + /** Bech32 queries bech32Prefix */ + + bech32Prefix(request?: Bech32PrefixRequest): Promise; + /** AddressBytesToString converts Account Address bytes to string */ + + addressBytesToString(request: AddressBytesToStringRequest): Promise; + /** AddressStringToBytes converts Address string to bytes */ + + addressStringToBytes(request: AddressStringToBytesRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.accounts = this.accounts.bind(this); + this.account = this.account.bind(this); + this.params = this.params.bind(this); + this.moduleAccounts = this.moduleAccounts.bind(this); + this.bech32Prefix = this.bech32Prefix.bind(this); + this.addressBytesToString = this.addressBytesToString.bind(this); + this.addressStringToBytes = this.addressStringToBytes.bind(this); + } + + accounts(request: QueryAccountsRequest = { + pagination: undefined + }): Promise { + const data = QueryAccountsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Accounts", data); + return promise.then(data => QueryAccountsResponse.decode(new _m0.Reader(data))); + } + + account(request: QueryAccountRequest): Promise { + const data = QueryAccountRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Account", data); + return promise.then(data => QueryAccountResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + moduleAccounts(request: QueryModuleAccountsRequest = {}): Promise { + const data = QueryModuleAccountsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "ModuleAccounts", data); + return promise.then(data => QueryModuleAccountsResponse.decode(new _m0.Reader(data))); + } + + bech32Prefix(request: Bech32PrefixRequest = {}): Promise { + const data = Bech32PrefixRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Bech32Prefix", data); + return promise.then(data => Bech32PrefixResponse.decode(new _m0.Reader(data))); + } + + addressBytesToString(request: AddressBytesToStringRequest): Promise { + const data = AddressBytesToStringRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AddressBytesToString", data); + return promise.then(data => AddressBytesToStringResponse.decode(new _m0.Reader(data))); + } + + addressStringToBytes(request: AddressStringToBytesRequest): Promise { + const data = AddressStringToBytesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AddressStringToBytes", data); + return promise.then(data => AddressStringToBytesResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + accounts(request?: QueryAccountsRequest): Promise { + return queryService.accounts(request); + }, + + account(request: QueryAccountRequest): Promise { + return queryService.account(request); + }, + + params(request?: QueryParamsRequest): Promise { + return 
queryService.params(request); + }, + + moduleAccounts(request?: QueryModuleAccountsRequest): Promise { + return queryService.moduleAccounts(request); + }, + + bech32Prefix(request?: Bech32PrefixRequest): Promise { + return queryService.bech32Prefix(request); + }, + + addressBytesToString(request: AddressBytesToStringRequest): Promise { + return queryService.addressBytesToString(request); + }, + + addressStringToBytes(request: AddressStringToBytesRequest): Promise { + return queryService.addressStringToBytes(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/auth/v1beta1/query.ts b/packages/codegen/src/cosmos/auth/v1beta1/query.ts new file mode 100644 index 00000000..d9f110dc --- /dev/null +++ b/packages/codegen/src/cosmos/auth/v1beta1/query.ts @@ -0,0 +1,765 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Params, ParamsSDKType } from "./auth"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * QueryAccountsRequest is the request type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryAccountsRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryAccountsRequest is the request type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryAccountsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryAccountsResponse is the response type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryAccountsResponse { + /** accounts are the existing accounts */ + accounts: Any[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryAccountsResponse is the response type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryAccountsResponseSDKType { + accounts: AnySDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryAccountRequest is the request type for the Query/Account RPC method. */ + +export interface QueryAccountRequest { + /** address defines the address to query for. */ + address: string; +} +/** QueryAccountRequest is the request type for the Query/Account RPC method. */ + +export interface QueryAccountRequestSDKType { + address: string; +} +/** QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. */ + +export interface QueryModuleAccountsRequest {} +/** QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. */ + +export interface QueryModuleAccountsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryAccountResponse is the response type for the Query/Account RPC method. */ + +export interface QueryAccountResponse { + /** account defines the account of the corresponding address. */ + account?: Any; +} +/** QueryAccountResponse is the response type for the Query/Account RPC method. 
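Because the account field is a google.protobuf.Any, callers generally unpack it themselves; a small helper sketch, assuming a plain BaseAccount payload and the camelCase typeUrl field used throughout these generated types:

  import { BaseAccount } from "./auth";
  import { QueryAccountResponse } from "./query";

  // Returns undefined for vesting/module accounts, which carry different
  // type URLs and need their own decoders.
  function toBaseAccount(res: QueryAccountResponse): BaseAccount | undefined {
    if (res.account?.typeUrl === "/cosmos.auth.v1beta1.BaseAccount") {
      return BaseAccount.decode(res.account.value);
    }
    return undefined;
  }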
*/ + +export interface QueryAccountResponseSDKType { + account?: AnySDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. */ + +export interface QueryModuleAccountsResponse { + accounts: Any[]; +} +/** QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. */ + +export interface QueryModuleAccountsResponseSDKType { + accounts: AnySDKType[]; +} +/** Bech32PrefixRequest is the request type for Bech32Prefix rpc method */ + +export interface Bech32PrefixRequest {} +/** Bech32PrefixRequest is the request type for Bech32Prefix rpc method */ + +export interface Bech32PrefixRequestSDKType {} +/** Bech32PrefixResponse is the response type for Bech32Prefix rpc method */ + +export interface Bech32PrefixResponse { + bech32Prefix: string; +} +/** Bech32PrefixResponse is the response type for Bech32Prefix rpc method */ + +export interface Bech32PrefixResponseSDKType { + bech32_prefix: string; +} +/** AddressBytesToStringRequest is the request type for AddressString rpc method */ + +export interface AddressBytesToStringRequest { + addressBytes: Uint8Array; +} +/** AddressBytesToStringRequest is the request type for AddressString rpc method */ + +export interface AddressBytesToStringRequestSDKType { + address_bytes: Uint8Array; +} +/** AddressBytesToStringResponse is the response type for AddressString rpc method */ + +export interface AddressBytesToStringResponse { + addressString: string; +} +/** AddressBytesToStringResponse is the response type for AddressString rpc method */ + +export interface AddressBytesToStringResponseSDKType { + address_string: string; +} +/** AddressStringToBytesRequest is the request type for AccountBytes rpc method */ + +export interface AddressStringToBytesRequest { + addressString: string; +} +/** AddressStringToBytesRequest is the request type for AccountBytes rpc method */ + +export interface AddressStringToBytesRequestSDKType { + address_string: string; +} +/** AddressStringToBytesResponse is the response type for AddressBytes rpc method */ + +export interface AddressStringToBytesResponse { + addressBytes: Uint8Array; +} +/** AddressStringToBytesResponse is the response type for AddressBytes rpc method */ + +export interface AddressStringToBytesResponseSDKType { + address_bytes: Uint8Array; +} + +function createBaseQueryAccountsRequest(): QueryAccountsRequest { + return { + pagination: undefined + }; +} + +export const QueryAccountsRequest = { + encode(message: QueryAccountsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAccountsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAccountsRequest { + const message = createBaseQueryAccountsRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAccountsResponse(): QueryAccountsResponse { + return { + accounts: [], + pagination: undefined + }; +} + +export const QueryAccountsResponse = { + encode(message: QueryAccountsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAccountsResponse { + const message = createBaseQueryAccountsResponse(); + message.accounts = object.accounts?.map(e => Any.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAccountRequest(): QueryAccountRequest { + return { + address: "" + }; +} + +export const QueryAccountRequest = { + encode(message: QueryAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAccountRequest { + const message = createBaseQueryAccountRequest(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseQueryModuleAccountsRequest(): QueryModuleAccountsRequest { + return {}; +} + +export const QueryModuleAccountsRequest = { + encode(_: QueryModuleAccountsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryModuleAccountsRequest { + const message = createBaseQueryModuleAccountsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryAccountResponse(): QueryAccountResponse { + return { + account: undefined + }; +} + +export const QueryAccountResponse = { + encode(message: QueryAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Any.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.account = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAccountResponse { + const message = createBaseQueryAccountResponse(); + message.account = object.account !== undefined && object.account !== null ? Any.fromPartial(object.account) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryModuleAccountsResponse(): QueryModuleAccountsResponse { + return { + accounts: [] + }; +} + +export const QueryModuleAccountsResponse = { + encode(message: QueryModuleAccountsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryModuleAccountsResponse { + const message = createBaseQueryModuleAccountsResponse(); + message.accounts = object.accounts?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseBech32PrefixRequest(): Bech32PrefixRequest { + return {}; +} + +export const Bech32PrefixRequest = { + encode(_: Bech32PrefixRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBech32PrefixRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): Bech32PrefixRequest { + const message = createBaseBech32PrefixRequest(); + return message; + } + +}; + +function createBaseBech32PrefixResponse(): Bech32PrefixResponse { + return { + bech32Prefix: "" + }; +} + +export const Bech32PrefixResponse = { + encode(message: Bech32PrefixResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bech32Prefix !== "") { + writer.uint32(10).string(message.bech32Prefix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBech32PrefixResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bech32Prefix = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Bech32PrefixResponse { + const message = createBaseBech32PrefixResponse(); + message.bech32Prefix = object.bech32Prefix ?? 
""; + return message; + } + +}; + +function createBaseAddressBytesToStringRequest(): AddressBytesToStringRequest { + return { + addressBytes: new Uint8Array() + }; +} + +export const AddressBytesToStringRequest = { + encode(message: AddressBytesToStringRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressBytes.length !== 0) { + writer.uint32(10).bytes(message.addressBytes); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressBytesToStringRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addressBytes = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AddressBytesToStringRequest { + const message = createBaseAddressBytesToStringRequest(); + message.addressBytes = object.addressBytes ?? new Uint8Array(); + return message; + } + +}; + +function createBaseAddressBytesToStringResponse(): AddressBytesToStringResponse { + return { + addressString: "" + }; +} + +export const AddressBytesToStringResponse = { + encode(message: AddressBytesToStringResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressString !== "") { + writer.uint32(10).string(message.addressString); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressBytesToStringResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addressString = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AddressBytesToStringResponse { + const message = createBaseAddressBytesToStringResponse(); + message.addressString = object.addressString ?? ""; + return message; + } + +}; + +function createBaseAddressStringToBytesRequest(): AddressStringToBytesRequest { + return { + addressString: "" + }; +} + +export const AddressStringToBytesRequest = { + encode(message: AddressStringToBytesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressString !== "") { + writer.uint32(10).string(message.addressString); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressStringToBytesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addressString = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AddressStringToBytesRequest { + const message = createBaseAddressStringToBytesRequest(); + message.addressString = object.addressString ?? 
""; + return message; + } + +}; + +function createBaseAddressStringToBytesResponse(): AddressStringToBytesResponse { + return { + addressBytes: new Uint8Array() + }; +} + +export const AddressStringToBytesResponse = { + encode(message: AddressStringToBytesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressBytes.length !== 0) { + writer.uint32(10).bytes(message.addressBytes); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressStringToBytesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addressBytes = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AddressStringToBytesResponse { + const message = createBaseAddressStringToBytesResponse(); + message.addressBytes = object.addressBytes ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/authz.ts b/packages/codegen/src/cosmos/authz/v1beta1/authz.ts new file mode 100644 index 00000000..0bae9810 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/authz.ts @@ -0,0 +1,298 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, toTimestamp, fromTimestamp } from "../../../helpers"; +/** + * GenericAuthorization gives the grantee unrestricted permissions to execute + * the provided method on behalf of the granter's account. + */ + +export interface GenericAuthorization { + /** Msg, identified by it's type URL, to grant unrestricted permissions to execute */ + msg: string; +} +/** + * GenericAuthorization gives the grantee unrestricted permissions to execute + * the provided method on behalf of the granter's account. + */ + +export interface GenericAuthorizationSDKType { + msg: string; +} +/** + * Grant gives permissions to execute + * the provide method with expiration time. + */ + +export interface Grant { + authorization?: Any; + /** + * time when the grant will expire and will be pruned. If null, then the grant + * doesn't have a time expiration (other conditions in `authorization` + * may apply to invalidate the grant) + */ + + expiration?: Date; +} +/** + * Grant gives permissions to execute + * the provide method with expiration time. + */ + +export interface GrantSDKType { + authorization?: AnySDKType; + expiration?: Date; +} +/** + * GrantAuthorization extends a grant with both the addresses of the grantee and granter. + * It is used in genesis.proto and query.proto + */ + +export interface GrantAuthorization { + granter: string; + grantee: string; + authorization?: Any; + expiration?: Date; +} +/** + * GrantAuthorization extends a grant with both the addresses of the grantee and granter. + * It is used in genesis.proto and query.proto + */ + +export interface GrantAuthorizationSDKType { + granter: string; + grantee: string; + authorization?: AnySDKType; + expiration?: Date; +} +/** GrantQueueItem contains the list of TypeURL of a sdk.Msg. */ + +export interface GrantQueueItem { + /** msg_type_urls contains the list of TypeURL of a sdk.Msg. 
*/ + msgTypeUrls: string[]; +} +/** GrantQueueItem contains the list of TypeURL of a sdk.Msg. */ + +export interface GrantQueueItemSDKType { + msg_type_urls: string[]; +} + +function createBaseGenericAuthorization(): GenericAuthorization { + return { + msg: "" + }; +} + +export const GenericAuthorization = { + encode(message: GenericAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msg !== "") { + writer.uint32(10).string(message.msg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenericAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenericAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.msg = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenericAuthorization { + const message = createBaseGenericAuthorization(); + message.msg = object.msg ?? ""; + return message; + } + +}; + +function createBaseGrant(): Grant { + return { + authorization: undefined, + expiration: undefined + }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authorization !== undefined) { + Any.encode(message.authorization, writer.uint32(10).fork()).ldelim(); + } + + if (message.expiration !== undefined) { + Timestamp.encode(toTimestamp(message.expiration), writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGrant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authorization = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.expiration = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Grant { + const message = createBaseGrant(); + message.authorization = object.authorization !== undefined && object.authorization !== null ? Any.fromPartial(object.authorization) : undefined; + message.expiration = object.expiration ?? undefined; + return message; + } + +}; + +function createBaseGrantAuthorization(): GrantAuthorization { + return { + granter: "", + grantee: "", + authorization: undefined, + expiration: undefined + }; +} + +export const GrantAuthorization = { + encode(message: GrantAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.authorization !== undefined) { + Any.encode(message.authorization, writer.uint32(26).fork()).ldelim(); + } + + if (message.expiration !== undefined) { + Timestamp.encode(toTimestamp(message.expiration), writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GrantAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGrantAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.authorization = Any.decode(reader, reader.uint32()); + break; + + case 4: + message.expiration = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GrantAuthorization { + const message = createBaseGrantAuthorization(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.authorization = object.authorization !== undefined && object.authorization !== null ? Any.fromPartial(object.authorization) : undefined; + message.expiration = object.expiration ?? undefined; + return message; + } + +}; + +function createBaseGrantQueueItem(): GrantQueueItem { + return { + msgTypeUrls: [] + }; +} + +export const GrantQueueItem = { + encode(message: GrantQueueItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.msgTypeUrls) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GrantQueueItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGrantQueueItem(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.msgTypeUrls.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GrantQueueItem { + const message = createBaseGrantQueueItem(); + message.msgTypeUrls = object.msgTypeUrls?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/event.ts b/packages/codegen/src/cosmos/authz/v1beta1/event.ts new file mode 100644 index 00000000..287d4b8c --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/event.ts @@ -0,0 +1,170 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** EventGrant is emitted on Msg/Grant */ + +export interface EventGrant { + /** Msg type URL for which an autorization is granted */ + msgTypeUrl: string; + /** Granter account address */ + + granter: string; + /** Grantee account address */ + + grantee: string; +} +/** EventGrant is emitted on Msg/Grant */ + +export interface EventGrantSDKType { + msg_type_url: string; + granter: string; + grantee: string; +} +/** EventRevoke is emitted on Msg/Revoke */ + +export interface EventRevoke { + /** Msg type URL for which an autorization is revoked */ + msgTypeUrl: string; + /** Granter account address */ + + granter: string; + /** Grantee account address */ + + grantee: string; +} +/** EventRevoke is emitted on Msg/Revoke */ + +export interface EventRevokeSDKType { + msg_type_url: string; + granter: string; + grantee: string; +} + +function createBaseEventGrant(): EventGrant { + return { + msgTypeUrl: "", + granter: "", + grantee: "" + }; +} + +export const EventGrant = { + encode(message: EventGrant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgTypeUrl !== "") { + writer.uint32(18).string(message.msgTypeUrl); + } + + if (message.granter !== 
"") { + writer.uint32(26).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(34).string(message.grantee); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventGrant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventGrant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.msgTypeUrl = reader.string(); + break; + + case 3: + message.granter = reader.string(); + break; + + case 4: + message.grantee = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventGrant { + const message = createBaseEventGrant(); + message.msgTypeUrl = object.msgTypeUrl ?? ""; + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + return message; + } + +}; + +function createBaseEventRevoke(): EventRevoke { + return { + msgTypeUrl: "", + granter: "", + grantee: "" + }; +} + +export const EventRevoke = { + encode(message: EventRevoke, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgTypeUrl !== "") { + writer.uint32(18).string(message.msgTypeUrl); + } + + if (message.granter !== "") { + writer.uint32(26).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(34).string(message.grantee); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventRevoke { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventRevoke(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.msgTypeUrl = reader.string(); + break; + + case 3: + message.granter = reader.string(); + break; + + case 4: + message.grantee = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventRevoke { + const message = createBaseEventRevoke(); + message.msgTypeUrl = object.msgTypeUrl ?? ""; + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/genesis.ts b/packages/codegen/src/cosmos/authz/v1beta1/genesis.ts new file mode 100644 index 00000000..037ff238 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/genesis.ts @@ -0,0 +1,58 @@ +import { GrantAuthorization, GrantAuthorizationSDKType } from "./authz"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the authz module's genesis state. */ + +export interface GenesisState { + authorization: GrantAuthorization[]; +} +/** GenesisState defines the authz module's genesis state. 
*/ + +export interface GenesisStateSDKType { + authorization: GrantAuthorizationSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + authorization: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.authorization) { + GrantAuthorization.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authorization.push(GrantAuthorization.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.authorization = object.authorization?.map(e => GrantAuthorization.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/authz/v1beta1/query.lcd.ts new file mode 100644 index 00000000..0a8df359 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/query.lcd.ts @@ -0,0 +1,79 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryGrantsRequest, QueryGrantsResponseSDKType, QueryGranterGrantsRequest, QueryGranterGrantsResponseSDKType, QueryGranteeGrantsRequest, QueryGranteeGrantsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.grants = this.grants.bind(this); + this.granterGrants = this.granterGrants.bind(this); + this.granteeGrants = this.granteeGrants.bind(this); + } + /* Returns list of `Authorization`, granted to the grantee by the granter. */ + + + async grants(params: QueryGrantsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.granter !== "undefined") { + options.params.granter = params.granter; + } + + if (typeof params?.grantee !== "undefined") { + options.params.grantee = params.grantee; + } + + if (typeof params?.msgTypeUrl !== "undefined") { + options.params.msg_type_url = params.msgTypeUrl; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/authz/v1beta1/grants`; + return await this.req.get(endpoint, options); + } + /* GranterGrants returns list of `GrantAuthorization`, granted by granter. + + Since: cosmos-sdk 0.46 */ + + + async granterGrants(params: QueryGranterGrantsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/authz/v1beta1/grants/granter/${params.granter}`; + return await this.req.get(endpoint, options); + } + /* GranteeGrants returns a list of `GrantAuthorization` by grantee. 
+ + Since: cosmos-sdk 0.46 */ + + + async granteeGrants(params: QueryGranteeGrantsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/authz/v1beta1/grants/grantee/${params.grantee}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/authz/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..c2fda460 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/query.rpc.Query.ts @@ -0,0 +1,71 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryGrantsRequest, QueryGrantsResponse, QueryGranterGrantsRequest, QueryGranterGrantsResponse, QueryGranteeGrantsRequest, QueryGranteeGrantsResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Returns list of `Authorization`, granted to the grantee by the granter. */ + grants(request: QueryGrantsRequest): Promise; + /** + * GranterGrants returns list of `GrantAuthorization`, granted by granter. + * + * Since: cosmos-sdk 0.46 + */ + + granterGrants(request: QueryGranterGrantsRequest): Promise; + /** + * GranteeGrants returns a list of `GrantAuthorization` by grantee. + * + * Since: cosmos-sdk 0.46 + */ + + granteeGrants(request: QueryGranteeGrantsRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.grants = this.grants.bind(this); + this.granterGrants = this.granterGrants.bind(this); + this.granteeGrants = this.granteeGrants.bind(this); + } + + grants(request: QueryGrantsRequest): Promise { + const data = QueryGrantsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Query", "Grants", data); + return promise.then(data => QueryGrantsResponse.decode(new _m0.Reader(data))); + } + + granterGrants(request: QueryGranterGrantsRequest): Promise { + const data = QueryGranterGrantsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Query", "GranterGrants", data); + return promise.then(data => QueryGranterGrantsResponse.decode(new _m0.Reader(data))); + } + + granteeGrants(request: QueryGranteeGrantsRequest): Promise { + const data = QueryGranteeGrantsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Query", "GranteeGrants", data); + return promise.then(data => QueryGranteeGrantsResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + grants(request: QueryGrantsRequest): Promise { + return queryService.grants(request); + }, + + granterGrants(request: QueryGranterGrantsRequest): Promise { + return queryService.granterGrants(request); + }, + + granteeGrants(request: QueryGranteeGrantsRequest): Promise { + return queryService.granteeGrants(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/query.ts b/packages/codegen/src/cosmos/authz/v1beta1/query.ts new file mode 100644 index 00000000..7b93128e --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/query.ts @@ 
-0,0 +1,447 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Grant, GrantSDKType, GrantAuthorization, GrantAuthorizationSDKType } from "./authz"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryGrantsRequest is the request type for the Query/Grants RPC method. */ + +export interface QueryGrantsRequest { + granter: string; + grantee: string; + /** Optional, msg_type_url, when set, will query only grants matching given msg type. */ + + msgTypeUrl: string; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGrantsRequest is the request type for the Query/Grants RPC method. */ + +export interface QueryGrantsRequestSDKType { + granter: string; + grantee: string; + msg_type_url: string; + pagination?: PageRequestSDKType; +} +/** QueryGrantsResponse is the response type for the Query/Authorizations RPC method. */ + +export interface QueryGrantsResponse { + /** authorizations is a list of grants granted for grantee by granter. */ + grants: Grant[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** QueryGrantsResponse is the response type for the Query/Authorizations RPC method. */ + +export interface QueryGrantsResponseSDKType { + grants: GrantSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGranterGrantsRequest is the request type for the Query/GranterGrants RPC method. */ + +export interface QueryGranterGrantsRequest { + granter: string; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGranterGrantsRequest is the request type for the Query/GranterGrants RPC method. */ + +export interface QueryGranterGrantsRequestSDKType { + granter: string; + pagination?: PageRequestSDKType; +} +/** QueryGranterGrantsResponse is the response type for the Query/GranterGrants RPC method. */ + +export interface QueryGranterGrantsResponse { + /** grants is a list of grants granted by the granter. */ + grants: GrantAuthorization[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** QueryGranterGrantsResponse is the response type for the Query/GranterGrants RPC method. */ + +export interface QueryGranterGrantsResponseSDKType { + grants: GrantAuthorizationSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGranteeGrantsRequest is the request type for the Query/IssuedGrants RPC method. */ + +export interface QueryGranteeGrantsRequest { + grantee: string; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGranteeGrantsRequest is the request type for the Query/IssuedGrants RPC method. */ + +export interface QueryGranteeGrantsRequestSDKType { + grantee: string; + pagination?: PageRequestSDKType; +} +/** QueryGranteeGrantsResponse is the response type for the Query/GranteeGrants RPC method. */ + +export interface QueryGranteeGrantsResponse { + /** grants is a list of grants granted to the grantee. */ + grants: GrantAuthorization[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** QueryGranteeGrantsResponse is the response type for the Query/GranteeGrants RPC method. 
*/ + +export interface QueryGranteeGrantsResponseSDKType { + grants: GrantAuthorizationSDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryGrantsRequest(): QueryGrantsRequest { + return { + granter: "", + grantee: "", + msgTypeUrl: "", + pagination: undefined + }; +} + +export const QueryGrantsRequest = { + encode(message: QueryGrantsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.msgTypeUrl !== "") { + writer.uint32(26).string(message.msgTypeUrl); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGrantsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGrantsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.msgTypeUrl = reader.string(); + break; + + case 4: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGrantsRequest { + const message = createBaseQueryGrantsRequest(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.msgTypeUrl = object.msgTypeUrl ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGrantsResponse(): QueryGrantsResponse { + return { + grants: [], + pagination: undefined + }; +} + +export const QueryGrantsResponse = { + encode(message: QueryGrantsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGrantsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGrantsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grants.push(Grant.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGrantsResponse { + const message = createBaseQueryGrantsResponse(); + message.grants = object.grants?.map(e => Grant.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGranterGrantsRequest(): QueryGranterGrantsRequest { + return { + granter: "", + pagination: undefined + }; +} + +export const QueryGranterGrantsRequest = { + encode(message: QueryGranterGrantsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranterGrantsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGranterGrantsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGranterGrantsRequest { + const message = createBaseQueryGranterGrantsRequest(); + message.granter = object.granter ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGranterGrantsResponse(): QueryGranterGrantsResponse { + return { + grants: [], + pagination: undefined + }; +} + +export const QueryGranterGrantsResponse = { + encode(message: QueryGranterGrantsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + GrantAuthorization.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranterGrantsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGranterGrantsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grants.push(GrantAuthorization.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGranterGrantsResponse { + const message = createBaseQueryGranterGrantsResponse(); + message.grants = object.grants?.map(e => GrantAuthorization.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGranteeGrantsRequest(): QueryGranteeGrantsRequest { + return { + grantee: "", + pagination: undefined + }; +} + +export const QueryGranteeGrantsRequest = { + encode(message: QueryGranteeGrantsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.grantee !== "") { + writer.uint32(10).string(message.grantee); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranteeGrantsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGranteeGrantsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grantee = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGranteeGrantsRequest { + const message = createBaseQueryGranteeGrantsRequest(); + message.grantee = object.grantee ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGranteeGrantsResponse(): QueryGranteeGrantsResponse { + return { + grants: [], + pagination: undefined + }; +} + +export const QueryGranteeGrantsResponse = { + encode(message: QueryGranteeGrantsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + GrantAuthorization.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGranteeGrantsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGranteeGrantsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grants.push(GrantAuthorization.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGranteeGrantsResponse { + const message = createBaseQueryGranteeGrantsResponse(); + message.grants = object.grants?.map(e => GrantAuthorization.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/authz/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..63499c42 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,56 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgGrant, MsgGrantResponse, MsgExec, MsgExecResponse, MsgRevoke, MsgRevokeResponse } from "./tx"; +/** Msg defines the authz Msg service. */ + +export interface Msg { + /** + * Grant grants the provided authorization to the grantee on the granter's + * account with the provided expiration time. If there is already a grant + * for the given (granter, grantee, Authorization) triple, then the grant + * will be overwritten. + */ + grant(request: MsgGrant): Promise; + /** + * Exec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ + + exec(request: MsgExec): Promise; + /** + * Revoke revokes any authorization corresponding to the provided method name on the + * granter's account that has been granted to the grantee. + */ + + revoke(request: MsgRevoke): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.grant = this.grant.bind(this); + this.exec = this.exec.bind(this); + this.revoke = this.revoke.bind(this); + } + + grant(request: MsgGrant): Promise { + const data = MsgGrant.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Msg", "Grant", data); + return promise.then(data => MsgGrantResponse.decode(new _m0.Reader(data))); + } + + exec(request: MsgExec): Promise { + const data = MsgExec.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Msg", "Exec", data); + return promise.then(data => MsgExecResponse.decode(new _m0.Reader(data))); + } + + revoke(request: MsgRevoke): Promise { + const data = MsgRevoke.encode(request).finish(); + const promise = this.rpc.request("cosmos.authz.v1beta1.Msg", "Revoke", data); + return promise.then(data => MsgRevokeResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/authz/v1beta1/tx.ts b/packages/codegen/src/cosmos/authz/v1beta1/tx.ts new file mode 100644 index 00000000..c9c30034 --- /dev/null +++ b/packages/codegen/src/cosmos/authz/v1beta1/tx.ts @@ -0,0 +1,390 @@ +import { Grant, GrantSDKType } from "./authz"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgGrant is a request type for Grant method. It declares authorization to the grantee + * on behalf of the granter with the provided expiration time. + */ + +export interface MsgGrant { + granter: string; + grantee: string; + grant?: Grant; +} +/** + * MsgGrant is a request type for Grant method. It declares authorization to the grantee + * on behalf of the granter with the provided expiration time. + */ + +export interface MsgGrantSDKType { + granter: string; + grantee: string; + grant?: GrantSDKType; +} +/** MsgExecResponse defines the Msg/MsgExecResponse response type. 
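 *
 * Editor's illustrative sketch (not part of the generated output): this
 * response is what MsgClientImpl.exec (generated in ./tx.rpc.msg) resolves to.
 * The `rpc` transport passed to MsgClientImpl is an assumption and must be
 * supplied by the caller (see the Rpc type in ../../../helpers).
 *
 * @example
 * // import { MsgClientImpl } from "./tx.rpc.msg";
 * const msgClient = new MsgClientImpl(rpc);
 * const exec = MsgExec.fromPartial({
 *   grantee: "cosmos1...",
 *   msgs: [] // Any-encoded sdk.Msgs to execute on behalf of the granter(s)
 * });
 * const response: MsgExecResponse = await msgClient.exec(exec);
 * // response.results holds one Uint8Array per executed msg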
*/ + +export interface MsgExecResponse { + results: Uint8Array[]; +} +/** MsgExecResponse defines the Msg/MsgExecResponse response type. */ + +export interface MsgExecResponseSDKType { + results: Uint8Array[]; +} +/** + * MsgExec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ + +export interface MsgExec { + grantee: string; + /** + * Authorization Msg requests to execute. Each msg must implement Authorization interface + * The x/authz will try to find a grant matching (msg.signers[0], grantee, MsgTypeURL(msg)) + * triple and validate it. + */ + + msgs: Any[]; +} +/** + * MsgExec attempts to execute the provided messages using + * authorizations granted to the grantee. Each message should have only + * one signer corresponding to the granter of the authorization. + */ + +export interface MsgExecSDKType { + grantee: string; + msgs: AnySDKType[]; +} +/** MsgGrantResponse defines the Msg/MsgGrant response type. */ + +export interface MsgGrantResponse {} +/** MsgGrantResponse defines the Msg/MsgGrant response type. */ + +export interface MsgGrantResponseSDKType {} +/** + * MsgRevoke revokes any authorization with the provided sdk.Msg type on the + * granter's account with that has been granted to the grantee. + */ + +export interface MsgRevoke { + granter: string; + grantee: string; + msgTypeUrl: string; +} +/** + * MsgRevoke revokes any authorization with the provided sdk.Msg type on the + * granter's account with that has been granted to the grantee. + */ + +export interface MsgRevokeSDKType { + granter: string; + grantee: string; + msg_type_url: string; +} +/** MsgRevokeResponse defines the Msg/MsgRevokeResponse response type. */ + +export interface MsgRevokeResponse {} +/** MsgRevokeResponse defines the Msg/MsgRevokeResponse response type. */ + +export interface MsgRevokeResponseSDKType {} + +function createBaseMsgGrant(): MsgGrant { + return { + granter: "", + grantee: "", + grant: undefined + }; +} + +export const MsgGrant = { + encode(message: MsgGrant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.grant !== undefined) { + Grant.encode(message.grant, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgGrant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.grant = Grant.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgGrant { + const message = createBaseMsgGrant(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.grant = object.grant !== undefined && object.grant !== null ? 
Grant.fromPartial(object.grant) : undefined; + return message; + } + +}; + +function createBaseMsgExecResponse(): MsgExecResponse { + return { + results: [] + }; +} + +export const MsgExecResponse = { + encode(message: MsgExecResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.results) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExecResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.results.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExecResponse { + const message = createBaseMsgExecResponse(); + message.results = object.results?.map(e => e) || []; + return message; + } + +}; + +function createBaseMsgExec(): MsgExec { + return { + grantee: "", + msgs: [] + }; +} + +export const MsgExec = { + encode(message: MsgExec, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.grantee !== "") { + writer.uint32(10).string(message.grantee); + } + + for (const v of message.msgs) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExec { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExec(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grantee = reader.string(); + break; + + case 2: + message.msgs.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExec { + const message = createBaseMsgExec(); + message.grantee = object.grantee ?? ""; + message.msgs = object.msgs?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgGrantResponse(): MsgGrantResponse { + return {}; +} + +export const MsgGrantResponse = { + encode(_: MsgGrantResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgGrantResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgGrantResponse { + const message = createBaseMsgGrantResponse(); + return message; + } + +}; + +function createBaseMsgRevoke(): MsgRevoke { + return { + granter: "", + grantee: "", + msgTypeUrl: "" + }; +} + +export const MsgRevoke = { + encode(message: MsgRevoke, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.msgTypeUrl !== "") { + writer.uint32(26).string(message.msgTypeUrl); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevoke { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevoke(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.msgTypeUrl = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRevoke { + const message = createBaseMsgRevoke(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.msgTypeUrl = object.msgTypeUrl ?? ""; + return message; + } + +}; + +function createBaseMsgRevokeResponse(): MsgRevokeResponse { + return {}; +} + +export const MsgRevokeResponse = { + encode(_: MsgRevokeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgRevokeResponse { + const message = createBaseMsgRevokeResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/authz.ts b/packages/codegen/src/cosmos/bank/v1beta1/authz.ts new file mode 100644 index 00000000..b9c2c8cf --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/authz.ts @@ -0,0 +1,68 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * SendAuthorization allows the grantee to spend up to spend_limit coins from + * the granter's account. + * + * Since: cosmos-sdk 0.43 + */ + +export interface SendAuthorization { + spendLimit: Coin[]; +} +/** + * SendAuthorization allows the grantee to spend up to spend_limit coins from + * the granter's account. 
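 *
 * Editor's illustrative sketch (not part of the generated output): building a
 * SendAuthorization with the codec below and serializing it so it can be
 * wrapped in a protobuf Any inside an authz Grant. The { denom, amount } coin
 * shape is the standard cosmos-sdk Coin and is assumed here, not defined in
 * this file.
 *
 * @example
 * const auth = SendAuthorization.fromPartial({
 *   spendLimit: [{ denom: "uatom", amount: "1000000" }]
 * });
 * const authBytes = SendAuthorization.encode(auth).finish();
 * // authBytes would then be carried in an Any with
 * // typeUrl "/cosmos.bank.v1beta1.SendAuthorization"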
+ * + * Since: cosmos-sdk 0.43 + */ + +export interface SendAuthorizationSDKType { + spend_limit: CoinSDKType[]; +} + +function createBaseSendAuthorization(): SendAuthorization { + return { + spendLimit: [] + }; +} + +export const SendAuthorization = { + encode(message: SendAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.spendLimit) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SendAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSendAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.spendLimit.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SendAuthorization { + const message = createBaseSendAuthorization(); + message.spendLimit = object.spendLimit?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/bank.ts b/packages/codegen/src/cosmos/bank/v1beta1/bank.ts new file mode 100644 index 00000000..78f2ea20 --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/bank.ts @@ -0,0 +1,620 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** Params defines the parameters for the bank module. */ + +export interface Params { + sendEnabled: SendEnabled[]; + defaultSendEnabled: boolean; +} +/** Params defines the parameters for the bank module. */ + +export interface ParamsSDKType { + send_enabled: SendEnabledSDKType[]; + default_send_enabled: boolean; +} +/** + * SendEnabled maps coin denom to a send_enabled status (whether a denom is + * sendable). + */ + +export interface SendEnabled { + denom: string; + enabled: boolean; +} +/** + * SendEnabled maps coin denom to a send_enabled status (whether a denom is + * sendable). + */ + +export interface SendEnabledSDKType { + denom: string; + enabled: boolean; +} +/** Input models transaction input. */ + +export interface Input { + address: string; + coins: Coin[]; +} +/** Input models transaction input. */ + +export interface InputSDKType { + address: string; + coins: CoinSDKType[]; +} +/** Output models transaction outputs. */ + +export interface Output { + address: string; + coins: Coin[]; +} +/** Output models transaction outputs. */ + +export interface OutputSDKType { + address: string; + coins: CoinSDKType[]; +} +/** + * Supply represents a struct that passively keeps track of the total supply + * amounts in the network. + * This message is deprecated now that supply is indexed by denom. + */ + +/** @deprecated */ + +export interface Supply { + total: Coin[]; +} +/** + * Supply represents a struct that passively keeps track of the total supply + * amounts in the network. + * This message is deprecated now that supply is indexed by denom. + */ + +/** @deprecated */ + +export interface SupplySDKType { + total: CoinSDKType[]; +} +/** + * DenomUnit represents a struct that describes a given + * denomination unit of the basic token. + */ + +export interface DenomUnit { + /** denom represents the string name of the given denom unit (e.g uatom). 
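 *
 * Editor's illustrative sketch (not part of the generated output): the
 * uatom/atom pair described in the exponent comment below, expressed with the
 * DenomUnit codec from this file. Values are examples only.
 *
 * @example
 * const units: DenomUnit[] = [
 *   DenomUnit.fromPartial({ denom: "uatom", exponent: 0 }),
 *   DenomUnit.fromPartial({ denom: "atom", exponent: 6, aliases: [] })
 * ];
 * // 1 atom = 10^6 uatom; the list is carried in Metadata.denomUnits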
*/ + denom: string; + /** + * exponent represents power of 10 exponent that one must + * raise the base_denom to in order to equal the given DenomUnit's denom + * 1 denom = 10^exponent base_denom + * (e.g. with a base_denom of uatom, one can create a DenomUnit of 'atom' with + * exponent = 6, thus: 1 atom = 10^6 uatom). + */ + + exponent: number; + /** aliases is a list of string aliases for the given denom */ + + aliases: string[]; +} +/** + * DenomUnit represents a struct that describes a given + * denomination unit of the basic token. + */ + +export interface DenomUnitSDKType { + denom: string; + exponent: number; + aliases: string[]; +} +/** + * Metadata represents a struct that describes + * a basic token. + */ + +export interface Metadata { + description: string; + /** denom_units represents the list of DenomUnit's for a given coin */ + + denomUnits: DenomUnit[]; + /** base represents the base denom (should be the DenomUnit with exponent = 0). */ + + base: string; + /** + * display indicates the suggested denom that should be + * displayed in clients. + */ + + display: string; + /** + * name defines the name of the token (eg: Cosmos Atom) + * + * Since: cosmos-sdk 0.43 + */ + + name: string; + /** + * symbol is the token symbol usually shown on exchanges (eg: ATOM). This can + * be the same as the display. + * + * Since: cosmos-sdk 0.43 + */ + + symbol: string; + /** + * URI to a document (on or off-chain) that contains additional information. Optional. + * + * Since: cosmos-sdk 0.46 + */ + + uri: string; + /** + * URIHash is a sha256 hash of a document pointed by URI. It's used to verify that + * the document didn't change. Optional. + * + * Since: cosmos-sdk 0.46 + */ + + uriHash: string; +} +/** + * Metadata represents a struct that describes + * a basic token. + */ + +export interface MetadataSDKType { + description: string; + denom_units: DenomUnitSDKType[]; + base: string; + display: string; + name: string; + symbol: string; + uri: string; + uri_hash: string; +} + +function createBaseParams(): Params { + return { + sendEnabled: [], + defaultSendEnabled: false + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.sendEnabled) { + SendEnabled.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.defaultSendEnabled === true) { + writer.uint32(16).bool(message.defaultSendEnabled); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sendEnabled.push(SendEnabled.decode(reader, reader.uint32())); + break; + + case 2: + message.defaultSendEnabled = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.sendEnabled = object.sendEnabled?.map(e => SendEnabled.fromPartial(e)) || []; + message.defaultSendEnabled = object.defaultSendEnabled ?? 
false; + return message; + } + +}; + +function createBaseSendEnabled(): SendEnabled { + return { + denom: "", + enabled: false + }; +} + +export const SendEnabled = { + encode(message: SendEnabled, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.enabled === true) { + writer.uint32(16).bool(message.enabled); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SendEnabled { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSendEnabled(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.enabled = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SendEnabled { + const message = createBaseSendEnabled(); + message.denom = object.denom ?? ""; + message.enabled = object.enabled ?? false; + return message; + } + +}; + +function createBaseInput(): Input { + return { + address: "", + coins: [] + }; +} + +export const Input = { + encode(message: Input, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.coins) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Input { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInput(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.coins.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Input { + const message = createBaseInput(); + message.address = object.address ?? ""; + message.coins = object.coins?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseOutput(): Output { + return { + address: "", + coins: [] + }; +} + +export const Output = { + encode(message: Output, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.coins) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Output { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOutput(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.coins.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Output { + const message = createBaseOutput(); + message.address = object.address ?? 
""; + message.coins = object.coins?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSupply(): Supply { + return { + total: [] + }; +} + +export const Supply = { + encode(message: Supply, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.total) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Supply { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSupply(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.total.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Supply { + const message = createBaseSupply(); + message.total = object.total?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseDenomUnit(): DenomUnit { + return { + denom: "", + exponent: 0, + aliases: [] + }; +} + +export const DenomUnit = { + encode(message: DenomUnit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.exponent !== 0) { + writer.uint32(16).uint32(message.exponent); + } + + for (const v of message.aliases) { + writer.uint32(26).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DenomUnit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDenomUnit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.exponent = reader.uint32(); + break; + + case 3: + message.aliases.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DenomUnit { + const message = createBaseDenomUnit(); + message.denom = object.denom ?? ""; + message.exponent = object.exponent ?? 0; + message.aliases = object.aliases?.map(e => e) || []; + return message; + } + +}; + +function createBaseMetadata(): Metadata { + return { + description: "", + denomUnits: [], + base: "", + display: "", + name: "", + symbol: "", + uri: "", + uriHash: "" + }; +} + +export const Metadata = { + encode(message: Metadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.description !== "") { + writer.uint32(10).string(message.description); + } + + for (const v of message.denomUnits) { + DenomUnit.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.base !== "") { + writer.uint32(26).string(message.base); + } + + if (message.display !== "") { + writer.uint32(34).string(message.display); + } + + if (message.name !== "") { + writer.uint32(42).string(message.name); + } + + if (message.symbol !== "") { + writer.uint32(50).string(message.symbol); + } + + if (message.uri !== "") { + writer.uint32(58).string(message.uri); + } + + if (message.uriHash !== "") { + writer.uint32(66).string(message.uriHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Metadata { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.description = reader.string(); + break; + + case 2: + message.denomUnits.push(DenomUnit.decode(reader, reader.uint32())); + break; + + case 3: + message.base = reader.string(); + break; + + case 4: + message.display = reader.string(); + break; + + case 5: + message.name = reader.string(); + break; + + case 6: + message.symbol = reader.string(); + break; + + case 7: + message.uri = reader.string(); + break; + + case 8: + message.uriHash = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Metadata { + const message = createBaseMetadata(); + message.description = object.description ?? ""; + message.denomUnits = object.denomUnits?.map(e => DenomUnit.fromPartial(e)) || []; + message.base = object.base ?? ""; + message.display = object.display ?? ""; + message.name = object.name ?? ""; + message.symbol = object.symbol ?? ""; + message.uri = object.uri ?? ""; + message.uriHash = object.uriHash ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/genesis.ts b/packages/codegen/src/cosmos/bank/v1beta1/genesis.ts new file mode 100644 index 00000000..71fe32bb --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/genesis.ts @@ -0,0 +1,181 @@ +import { Params, ParamsSDKType, Metadata, MetadataSDKType } from "./bank"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the bank module's genesis state. */ + +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** balances is an array containing the balances of all the accounts. */ + + balances: Balance[]; + /** + * supply represents the total supply. If it is left empty, then supply will be calculated based on the provided + * balances. Otherwise, it will be used to validate that the sum of the balances equals this amount. + */ + + supply: Coin[]; + /** denom_metadata defines the metadata of the differents coins. */ + + denomMetadata: Metadata[]; +} +/** GenesisState defines the bank module's genesis state. */ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + balances: BalanceSDKType[]; + supply: CoinSDKType[]; + denom_metadata: MetadataSDKType[]; +} +/** + * Balance defines an account address and balance pair used in the bank module's + * genesis state. + */ + +export interface Balance { + /** address is the address of the balance holder. */ + address: string; + /** coins defines the different coins this balance holds. */ + + coins: Coin[]; +} +/** + * Balance defines an account address and balance pair used in the bank module's + * genesis state. 
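 *
 * Editor's illustrative sketch (not part of the generated output): assembling
 * a bank genesis state with a single balance via the codecs below. The
 * { denom, amount } coin shape is the standard cosmos-sdk Coin and the
 * address/amount values are placeholders.
 *
 * @example
 * const genesis = GenesisState.fromPartial({
 *   balances: [{ address: "cosmos1...", coins: [{ denom: "uatom", amount: "1000" }] }],
 *   supply: [],
 *   denomMetadata: []
 * });
 * const bytes = GenesisState.encode(genesis).finish();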
+ */ + +export interface BalanceSDKType { + address: string; + coins: CoinSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + balances: [], + supply: [], + denomMetadata: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.balances) { + Balance.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.supply) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.denomMetadata) { + Metadata.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.balances.push(Balance.decode(reader, reader.uint32())); + break; + + case 3: + message.supply.push(Coin.decode(reader, reader.uint32())); + break; + + case 4: + message.denomMetadata.push(Metadata.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.balances = object.balances?.map(e => Balance.fromPartial(e)) || []; + message.supply = object.supply?.map(e => Coin.fromPartial(e)) || []; + message.denomMetadata = object.denomMetadata?.map(e => Metadata.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseBalance(): Balance { + return { + address: "", + coins: [] + }; +} + +export const Balance = { + encode(message: Balance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.coins) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Balance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBalance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.coins.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Balance { + const message = createBaseBalance(); + message.address = object.address ?? 
""; + message.coins = object.coins?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/bank/v1beta1/query.lcd.ts new file mode 100644 index 00000000..7535655d --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/query.lcd.ts @@ -0,0 +1,150 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryBalanceRequest, QueryBalanceResponseSDKType, QueryAllBalancesRequest, QueryAllBalancesResponseSDKType, QuerySpendableBalancesRequest, QuerySpendableBalancesResponseSDKType, QueryTotalSupplyRequest, QueryTotalSupplyResponseSDKType, QuerySupplyOfRequest, QuerySupplyOfResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDenomMetadataRequest, QueryDenomMetadataResponseSDKType, QueryDenomsMetadataRequest, QueryDenomsMetadataResponseSDKType, QueryDenomOwnersRequest, QueryDenomOwnersResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.balance = this.balance.bind(this); + this.allBalances = this.allBalances.bind(this); + this.spendableBalances = this.spendableBalances.bind(this); + this.totalSupply = this.totalSupply.bind(this); + this.supplyOf = this.supplyOf.bind(this); + this.params = this.params.bind(this); + this.denomMetadata = this.denomMetadata.bind(this); + this.denomsMetadata = this.denomsMetadata.bind(this); + this.denomOwners = this.denomOwners.bind(this); + } + /* Balance queries the balance of a single coin for a single account. */ + + + async balance(params: QueryBalanceRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.denom !== "undefined") { + options.params.denom = params.denom; + } + + const endpoint = `cosmos/bank/v1beta1/balances/${params.address}/by_denom`; + return await this.req.get(endpoint, options); + } + /* AllBalances queries the balance of all coins for a single account. */ + + + async allBalances(params: QueryAllBalancesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/bank/v1beta1/balances/${params.address}`; + return await this.req.get(endpoint, options); + } + /* SpendableBalances queries the spenable balance of all coins for a single + account. */ + + + async spendableBalances(params: QuerySpendableBalancesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/bank/v1beta1/spendable_balances/${params.address}`; + return await this.req.get(endpoint, options); + } + /* TotalSupply queries the total supply of all coins. */ + + + async totalSupply(params: QueryTotalSupplyRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/bank/v1beta1/supply`; + return await this.req.get(endpoint, options); + } + /* SupplyOf queries the supply of a single coin. 
*/ + + + async supplyOf(params: QuerySupplyOfRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.denom !== "undefined") { + options.params.denom = params.denom; + } + + const endpoint = `cosmos/bank/v1beta1/supply/by_denom`; + return await this.req.get(endpoint, options); + } + /* Params queries the parameters of x/bank module. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/bank/v1beta1/params`; + return await this.req.get(endpoint); + } + /* DenomsMetadata queries the client metadata of a given coin denomination. */ + + + async denomMetadata(params: QueryDenomMetadataRequest): Promise { + const endpoint = `cosmos/bank/v1beta1/denoms_metadata/${params.denom}`; + return await this.req.get(endpoint); + } + /* DenomsMetadata queries the client metadata for all registered coin + denominations. */ + + + async denomsMetadata(params: QueryDenomsMetadataRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/bank/v1beta1/denoms_metadata`; + return await this.req.get(endpoint, options); + } + /* DenomOwners queries for all account addresses that own a particular token + denomination. */ + + + async denomOwners(params: QueryDenomOwnersRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/bank/v1beta1/denom_owners/${params.denom}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/bank/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..a60bbc8c --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/query.rpc.Query.ts @@ -0,0 +1,160 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryBalanceRequest, QueryBalanceResponse, QueryAllBalancesRequest, QueryAllBalancesResponse, QuerySpendableBalancesRequest, QuerySpendableBalancesResponse, QueryTotalSupplyRequest, QueryTotalSupplyResponse, QuerySupplyOfRequest, QuerySupplyOfResponse, QueryParamsRequest, QueryParamsResponse, QueryDenomMetadataRequest, QueryDenomMetadataResponse, QueryDenomsMetadataRequest, QueryDenomsMetadataResponse, QueryDenomOwnersRequest, QueryDenomOwnersResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Balance queries the balance of a single coin for a single account. */ + balance(request: QueryBalanceRequest): Promise; + /** AllBalances queries the balance of all coins for a single account. */ + + allBalances(request: QueryAllBalancesRequest): Promise; + /** + * SpendableBalances queries the spenable balance of all coins for a single + * account. + */ + + spendableBalances(request: QuerySpendableBalancesRequest): Promise; + /** TotalSupply queries the total supply of all coins. */ + + totalSupply(request?: QueryTotalSupplyRequest): Promise; + /** SupplyOf queries the supply of a single coin. */ + + supplyOf(request: QuerySupplyOfRequest): Promise; + /** Params queries the parameters of x/bank module. 
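 *
 * Editor's illustrative sketch (not part of the generated output): this
 * interface is implemented by QueryClientImpl below and is usually consumed
 * through createRpcQueryExtension at the bottom of this file. The wiring via
 * @cosmjs/stargate QueryClient.withExtensions and a Tendermint RPC client is
 * an assumption about the consuming code, so it is shown commented out.
 *
 * @example
 * // const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
 * // const { balance } = await client.balance({ address: "cosmos1...", denom: "uatom" });
 * // const { params } = await client.params();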
*/ + + params(request?: QueryParamsRequest): Promise; + /** DenomsMetadata queries the client metadata of a given coin denomination. */ + + denomMetadata(request: QueryDenomMetadataRequest): Promise; + /** + * DenomsMetadata queries the client metadata for all registered coin + * denominations. + */ + + denomsMetadata(request?: QueryDenomsMetadataRequest): Promise; + /** + * DenomOwners queries for all account addresses that own a particular token + * denomination. + */ + + denomOwners(request: QueryDenomOwnersRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.balance = this.balance.bind(this); + this.allBalances = this.allBalances.bind(this); + this.spendableBalances = this.spendableBalances.bind(this); + this.totalSupply = this.totalSupply.bind(this); + this.supplyOf = this.supplyOf.bind(this); + this.params = this.params.bind(this); + this.denomMetadata = this.denomMetadata.bind(this); + this.denomsMetadata = this.denomsMetadata.bind(this); + this.denomOwners = this.denomOwners.bind(this); + } + + balance(request: QueryBalanceRequest): Promise { + const data = QueryBalanceRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "Balance", data); + return promise.then(data => QueryBalanceResponse.decode(new _m0.Reader(data))); + } + + allBalances(request: QueryAllBalancesRequest): Promise { + const data = QueryAllBalancesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "AllBalances", data); + return promise.then(data => QueryAllBalancesResponse.decode(new _m0.Reader(data))); + } + + spendableBalances(request: QuerySpendableBalancesRequest): Promise { + const data = QuerySpendableBalancesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "SpendableBalances", data); + return promise.then(data => QuerySpendableBalancesResponse.decode(new _m0.Reader(data))); + } + + totalSupply(request: QueryTotalSupplyRequest = { + pagination: undefined + }): Promise { + const data = QueryTotalSupplyRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "TotalSupply", data); + return promise.then(data => QueryTotalSupplyResponse.decode(new _m0.Reader(data))); + } + + supplyOf(request: QuerySupplyOfRequest): Promise { + const data = QuerySupplyOfRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "SupplyOf", data); + return promise.then(data => QuerySupplyOfResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + denomMetadata(request: QueryDenomMetadataRequest): Promise { + const data = QueryDenomMetadataRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "DenomMetadata", data); + return promise.then(data => QueryDenomMetadataResponse.decode(new _m0.Reader(data))); + } + + denomsMetadata(request: QueryDenomsMetadataRequest = { + pagination: undefined + }): Promise { + const data = QueryDenomsMetadataRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "DenomsMetadata", data); + return promise.then(data => 
QueryDenomsMetadataResponse.decode(new _m0.Reader(data))); + } + + denomOwners(request: QueryDenomOwnersRequest): Promise { + const data = QueryDenomOwnersRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Query", "DenomOwners", data); + return promise.then(data => QueryDenomOwnersResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + balance(request: QueryBalanceRequest): Promise { + return queryService.balance(request); + }, + + allBalances(request: QueryAllBalancesRequest): Promise { + return queryService.allBalances(request); + }, + + spendableBalances(request: QuerySpendableBalancesRequest): Promise { + return queryService.spendableBalances(request); + }, + + totalSupply(request?: QueryTotalSupplyRequest): Promise { + return queryService.totalSupply(request); + }, + + supplyOf(request: QuerySupplyOfRequest): Promise { + return queryService.supplyOf(request); + }, + + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + denomMetadata(request: QueryDenomMetadataRequest): Promise { + return queryService.denomMetadata(request); + }, + + denomsMetadata(request?: QueryDenomsMetadataRequest): Promise { + return queryService.denomsMetadata(request); + }, + + denomOwners(request: QueryDenomOwnersRequest): Promise { + return queryService.denomOwners(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/query.ts b/packages/codegen/src/cosmos/bank/v1beta1/query.ts new file mode 100644 index 00000000..5da00ebd --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/query.ts @@ -0,0 +1,1257 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Params, ParamsSDKType, Metadata, MetadataSDKType } from "./bank"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryBalanceRequest is the request type for the Query/Balance RPC method. */ + +export interface QueryBalanceRequest { + /** address is the address to query balances for. */ + address: string; + /** denom is the coin denom to query balances for. */ + + denom: string; +} +/** QueryBalanceRequest is the request type for the Query/Balance RPC method. */ + +export interface QueryBalanceRequestSDKType { + address: string; + denom: string; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method. */ + +export interface QueryBalanceResponse { + /** balance is the balance of the coin. */ + balance?: Coin; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method. */ + +export interface QueryBalanceResponseSDKType { + balance?: CoinSDKType; +} +/** QueryBalanceRequest is the request type for the Query/AllBalances RPC method. */ + +export interface QueryAllBalancesRequest { + /** address is the address to query balances for. */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryBalanceRequest is the request type for the Query/AllBalances RPC method. 
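 *
 * Editor's illustrative sketch (not part of the generated output): the
 * camelCase types in this file are used with the codecs defined further below,
 * while the snake_case SDKType counterparts are what the LCD client in
 * ./query.lcd returns.
 *
 * @example
 * const request = QueryAllBalancesRequest.fromPartial({ address: "cosmos1..." });
 * const bytes = QueryAllBalancesRequest.encode(request).finish();
 * // ...send bytes over gRPC/RPC, then:
 * // const response = QueryAllBalancesResponse.decode(responseBytes);
 * // response.balances: Coin[]; response.pagination?: PageResponse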
*/ + +export interface QueryAllBalancesRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryAllBalancesResponse is the response type for the Query/AllBalances RPC + * method. + */ + +export interface QueryAllBalancesResponse { + /** balances is the balances of all the coins. */ + balances: Coin[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryAllBalancesResponse is the response type for the Query/AllBalances RPC + * method. + */ + +export interface QueryAllBalancesResponseSDKType { + balances: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QuerySpendableBalancesRequest defines the gRPC request structure for querying + * an account's spendable balances. + */ + +export interface QuerySpendableBalancesRequest { + /** address is the address to query spendable balances for. */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QuerySpendableBalancesRequest defines the gRPC request structure for querying + * an account's spendable balances. + */ + +export interface QuerySpendableBalancesRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QuerySpendableBalancesResponse defines the gRPC response structure for querying + * an account's spendable balances. + */ + +export interface QuerySpendableBalancesResponse { + /** balances is the spendable balances of all the coins. */ + balances: Coin[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QuerySpendableBalancesResponse defines the gRPC response structure for querying + * an account's spendable balances. + */ + +export interface QuerySpendableBalancesResponseSDKType { + balances: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC + * method. + */ + +export interface QueryTotalSupplyRequest { + /** + * pagination defines an optional pagination for the request. + * + * Since: cosmos-sdk 0.43 + */ + pagination?: PageRequest; +} +/** + * QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC + * method. + */ + +export interface QueryTotalSupplyRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC + * method + */ + +export interface QueryTotalSupplyResponse { + /** supply is the supply of the coins */ + supply: Coin[]; + /** + * pagination defines the pagination in the response. + * + * Since: cosmos-sdk 0.43 + */ + + pagination?: PageResponse; +} +/** + * QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC + * method + */ + +export interface QueryTotalSupplyResponseSDKType { + supply: CoinSDKType[]; + pagination?: PageResponseSDKType; +} +/** QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. */ + +export interface QuerySupplyOfRequest { + /** denom is the coin denom to query balances for. */ + denom: string; +} +/** QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. */ + +export interface QuerySupplyOfRequestSDKType { + denom: string; +} +/** QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. */ + +export interface QuerySupplyOfResponse { + /** amount is the supply of the coin. */ + amount?: Coin; +} +/** QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. 
*/ + +export interface QuerySupplyOfResponseSDKType { + amount?: CoinSDKType; +} +/** QueryParamsRequest defines the request type for querying x/bank parameters. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest defines the request type for querying x/bank parameters. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse defines the response type for querying x/bank parameters. */ + +export interface QueryParamsResponse { + params?: Params; +} +/** QueryParamsResponse defines the response type for querying x/bank parameters. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. */ + +export interface QueryDenomsMetadataRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. */ + +export interface QueryDenomsMetadataRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC + * method. + */ + +export interface QueryDenomsMetadataResponse { + /** metadata provides the client information for all the registered tokens. */ + metadatas: Metadata[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC + * method. + */ + +export interface QueryDenomsMetadataResponseSDKType { + metadatas: MetadataSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. */ + +export interface QueryDenomMetadataRequest { + /** denom is the coin denom to query the metadata for. */ + denom: string; +} +/** QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. */ + +export interface QueryDenomMetadataRequestSDKType { + denom: string; +} +/** + * QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC + * method. + */ + +export interface QueryDenomMetadataResponse { + /** metadata describes and provides all the client information for the requested token. */ + metadata?: Metadata; +} +/** + * QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC + * method. + */ + +export interface QueryDenomMetadataResponseSDKType { + metadata?: MetadataSDKType; +} +/** + * QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, + * which queries for a paginated set of all account holders of a particular + * denomination. + */ + +export interface QueryDenomOwnersRequest { + /** denom defines the coin denomination to query all account holders for. */ + denom: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, + * which queries for a paginated set of all account holders of a particular + * denomination. + */ + +export interface QueryDenomOwnersRequestSDKType { + denom: string; + pagination?: PageRequestSDKType; +} +/** + * DenomOwner defines structure representing an account that owns or holds a + * particular denominated token. It contains the account address and account + * balance of the denominated token. 
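 *
 * Editor's illustrative sketch (not part of the generated output): DenomOwner
 * entries come back from the DenomOwners query. `queryClient` below is assumed
 * to be a QueryClientImpl (or the query extension) from ./query.rpc.Query.
 *
 * @example
 * const request: QueryDenomOwnersRequest = { denom: "uatom", pagination: undefined };
 * // const response = await queryClient.denomOwners(request);
 * // response.denomOwners is a DenomOwner[]; each entry has address and balance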
+ */ + +export interface DenomOwner { + /** address defines the address that owns a particular denomination. */ + address: string; + /** balance is the balance of the denominated coin for an account. */ + + balance?: Coin; +} +/** + * DenomOwner defines structure representing an account that owns or holds a + * particular denominated token. It contains the account address and account + * balance of the denominated token. + */ + +export interface DenomOwnerSDKType { + address: string; + balance?: CoinSDKType; +} +/** QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. */ + +export interface QueryDenomOwnersResponse { + denomOwners: DenomOwner[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. */ + +export interface QueryDenomOwnersResponseSDKType { + denom_owners: DenomOwnerSDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryBalanceRequest(): QueryBalanceRequest { + return { + address: "", + denom: "" + }; +} + +export const QueryBalanceRequest = { + encode(message: QueryBalanceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.denom !== "") { + writer.uint32(18).string(message.denom); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryBalanceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.denom = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryBalanceRequest { + const message = createBaseQueryBalanceRequest(); + message.address = object.address ?? ""; + message.denom = object.denom ?? ""; + return message; + } + +}; + +function createBaseQueryBalanceResponse(): QueryBalanceResponse { + return { + balance: undefined + }; +} + +export const QueryBalanceResponse = { + encode(message: QueryBalanceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.balance !== undefined) { + Coin.encode(message.balance, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryBalanceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.balance = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryBalanceResponse { + const message = createBaseQueryBalanceResponse(); + message.balance = object.balance !== undefined && object.balance !== null ? 
Coin.fromPartial(object.balance) : undefined; + return message; + } + +}; + +function createBaseQueryAllBalancesRequest(): QueryAllBalancesRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QueryAllBalancesRequest = { + encode(message: QueryAllBalancesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllBalancesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllBalancesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllBalancesRequest { + const message = createBaseQueryAllBalancesRequest(); + message.address = object.address ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllBalancesResponse(): QueryAllBalancesResponse { + return { + balances: [], + pagination: undefined + }; +} + +export const QueryAllBalancesResponse = { + encode(message: QueryAllBalancesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.balances) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllBalancesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllBalancesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.balances.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllBalancesResponse { + const message = createBaseQueryAllBalancesResponse(); + message.balances = object.balances?.map(e => Coin.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQuerySpendableBalancesRequest(): QuerySpendableBalancesRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QuerySpendableBalancesRequest = { + encode(message: QuerySpendableBalancesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySpendableBalancesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySpendableBalancesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySpendableBalancesRequest { + const message = createBaseQuerySpendableBalancesRequest(); + message.address = object.address ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQuerySpendableBalancesResponse(): QuerySpendableBalancesResponse { + return { + balances: [], + pagination: undefined + }; +} + +export const QuerySpendableBalancesResponse = { + encode(message: QuerySpendableBalancesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.balances) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySpendableBalancesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySpendableBalancesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.balances.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySpendableBalancesResponse { + const message = createBaseQuerySpendableBalancesResponse(); + message.balances = object.balances?.map(e => Coin.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryTotalSupplyRequest(): QueryTotalSupplyRequest { + return { + pagination: undefined + }; +} + +export const QueryTotalSupplyRequest = { + encode(message: QueryTotalSupplyRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTotalSupplyRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTotalSupplyRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTotalSupplyRequest { + const message = createBaseQueryTotalSupplyRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryTotalSupplyResponse(): QueryTotalSupplyResponse { + return { + supply: [], + pagination: undefined + }; +} + +export const QueryTotalSupplyResponse = { + encode(message: QueryTotalSupplyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.supply) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTotalSupplyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTotalSupplyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.supply.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTotalSupplyResponse { + const message = createBaseQueryTotalSupplyResponse(); + message.supply = object.supply?.map(e => Coin.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQuerySupplyOfRequest(): QuerySupplyOfRequest { + return { + denom: "" + }; +} + +export const QuerySupplyOfRequest = { + encode(message: QuerySupplyOfRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyOfRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySupplyOfRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySupplyOfRequest { + const message = createBaseQuerySupplyOfRequest(); + message.denom = object.denom ?? ""; + return message; + } + +}; + +function createBaseQuerySupplyOfResponse(): QuerySupplyOfResponse { + return { + amount: undefined + }; +} + +export const QuerySupplyOfResponse = { + encode(message: QuerySupplyOfResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.amount !== undefined) { + Coin.encode(message.amount, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyOfResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySupplyOfResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySupplyOfResponse { + const message = createBaseQuerySupplyOfResponse(); + message.amount = object.amount !== undefined && object.amount !== null ? Coin.fromPartial(object.amount) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryDenomsMetadataRequest(): QueryDenomsMetadataRequest { + return { + pagination: undefined + }; +} + +export const QueryDenomsMetadataRequest = { + encode(message: QueryDenomsMetadataRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomsMetadataRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomsMetadataRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomsMetadataRequest { + const message = createBaseQueryDenomsMetadataRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDenomsMetadataResponse(): QueryDenomsMetadataResponse { + return { + metadatas: [], + pagination: undefined + }; +} + +export const QueryDenomsMetadataResponse = { + encode(message: QueryDenomsMetadataResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.metadatas) { + Metadata.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomsMetadataResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomsMetadataResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.metadatas.push(Metadata.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomsMetadataResponse { + const message = createBaseQueryDenomsMetadataResponse(); + message.metadatas = object.metadatas?.map(e => Metadata.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDenomMetadataRequest(): QueryDenomMetadataRequest { + return { + denom: "" + }; +} + +export const QueryDenomMetadataRequest = { + encode(message: QueryDenomMetadataRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomMetadataRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomMetadataRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomMetadataRequest { + const message = createBaseQueryDenomMetadataRequest(); + message.denom = object.denom ?? ""; + return message; + } + +}; + +function createBaseQueryDenomMetadataResponse(): QueryDenomMetadataResponse { + return { + metadata: undefined + }; +} + +export const QueryDenomMetadataResponse = { + encode(message: QueryDenomMetadataResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.metadata !== undefined) { + Metadata.encode(message.metadata, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomMetadataResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomMetadataResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.metadata = Metadata.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomMetadataResponse { + const message = createBaseQueryDenomMetadataResponse(); + message.metadata = object.metadata !== undefined && object.metadata !== null ? Metadata.fromPartial(object.metadata) : undefined; + return message; + } + +}; + +function createBaseQueryDenomOwnersRequest(): QueryDenomOwnersRequest { + return { + denom: "", + pagination: undefined + }; +} + +export const QueryDenomOwnersRequest = { + encode(message: QueryDenomOwnersRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomOwnersRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDenomOwnersRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomOwnersRequest { + const message = createBaseQueryDenomOwnersRequest(); + message.denom = object.denom ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseDenomOwner(): DenomOwner { + return { + address: "", + balance: undefined + }; +} + +export const DenomOwner = { + encode(message: DenomOwner, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.balance !== undefined) { + Coin.encode(message.balance, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DenomOwner { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDenomOwner(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.balance = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DenomOwner { + const message = createBaseDenomOwner(); + message.address = object.address ?? ""; + message.balance = object.balance !== undefined && object.balance !== null ? Coin.fromPartial(object.balance) : undefined; + return message; + } + +}; + +function createBaseQueryDenomOwnersResponse(): QueryDenomOwnersResponse { + return { + denomOwners: [], + pagination: undefined + }; +} + +export const QueryDenomOwnersResponse = { + encode(message: QueryDenomOwnersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.denomOwners) { + DenomOwner.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomOwnersResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomOwnersResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denomOwners.push(DenomOwner.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomOwnersResponse { + const message = createBaseQueryDenomOwnersResponse(); + message.denomOwners = object.denomOwners?.map(e => DenomOwner.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/bank/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..4112bb9d --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,34 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSend, MsgSendResponse, MsgMultiSend, MsgMultiSendResponse } from "./tx"; +/** Msg defines the bank Msg service. */ + +export interface Msg { + /** Send defines a method for sending coins from one account to another account. */ + send(request: MsgSend): Promise; + /** MultiSend defines a method for sending coins from some accounts to other accounts. */ + + multiSend(request: MsgMultiSend): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.send = this.send.bind(this); + this.multiSend = this.multiSend.bind(this); + } + + send(request: MsgSend): Promise { + const data = MsgSend.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Msg", "Send", data); + return promise.then(data => MsgSendResponse.decode(new _m0.Reader(data))); + } + + multiSend(request: MsgMultiSend): Promise { + const data = MsgMultiSend.encode(request).finish(); + const promise = this.rpc.request("cosmos.bank.v1beta1.Msg", "MultiSend", data); + return promise.then(data => MsgMultiSendResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bank/v1beta1/tx.ts b/packages/codegen/src/cosmos/bank/v1beta1/tx.ts new file mode 100644 index 00000000..25716b1b --- /dev/null +++ b/packages/codegen/src/cosmos/bank/v1beta1/tx.ts @@ -0,0 +1,230 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Input, InputSDKType, Output, OutputSDKType } from "./bank"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgSend represents a message to send coins from one account to another. */ + +export interface MsgSend { + fromAddress: string; + toAddress: string; + amount: Coin[]; +} +/** MsgSend represents a message to send coins from one account to another. */ + +export interface MsgSendSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; +} +/** MsgSendResponse defines the Msg/Send response type. */ + +export interface MsgSendResponse {} +/** MsgSendResponse defines the Msg/Send response type. */ + +export interface MsgSendResponseSDKType {} +/** MsgMultiSend represents an arbitrary multi-in, multi-out send message. */ + +export interface MsgMultiSend { + inputs: Input[]; + outputs: Output[]; +} +/** MsgMultiSend represents an arbitrary multi-in, multi-out send message. */ + +export interface MsgMultiSendSDKType { + inputs: InputSDKType[]; + outputs: OutputSDKType[]; +} +/** MsgMultiSendResponse defines the Msg/MultiSend response type. */ + +export interface MsgMultiSendResponse {} +/** MsgMultiSendResponse defines the Msg/MultiSend response type. 
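MsgClientImpl above only needs an Rpc implementation whose request(service, method, data) call returns the raw response bytes, which is how the generated send and multiSend methods use it. A minimal sketch, assuming a caller-supplied transport with that shape and placeholder addresses and denoms:

import { MsgClientImpl } from "./tx.rpc.msg"; // illustrative paths
import { MsgSend } from "./tx";

// Any object with this shape satisfies the Rpc dependency used by the generated client.
declare const transport: {
  request(service: string, method: string, data: Uint8Array): Promise<Uint8Array>;
};

const msgClient = new MsgClientImpl(transport);

async function sendExample() {
  const response = await msgClient.send(MsgSend.fromPartial({
    fromAddress: "cosmos1sender",                 // placeholder addresses
    toAddress: "cosmos1recipient",
    amount: [{ denom: "uatom", amount: "1000" }]  // assumes the usual Coin { denom, amount } shape
  }));
  return response; // MsgSendResponse is an empty message
}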
*/ + +export interface MsgMultiSendResponseSDKType {} + +function createBaseMsgSend(): MsgSend { + return { + fromAddress: "", + toAddress: "", + amount: [] + }; +} + +export const MsgSend = { + encode(message: MsgSend, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSend { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSend(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + + case 2: + message.toAddress = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSend { + const message = createBaseMsgSend(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgSendResponse(): MsgSendResponse { + return {}; +} + +export const MsgSendResponse = { + encode(_: MsgSendResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSendResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSendResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSendResponse { + const message = createBaseMsgSendResponse(); + return message; + } + +}; + +function createBaseMsgMultiSend(): MsgMultiSend { + return { + inputs: [], + outputs: [] + }; +} + +export const MsgMultiSend = { + encode(message: MsgMultiSend, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.inputs) { + Input.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.outputs) { + Output.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMultiSend { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgMultiSend(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.inputs.push(Input.decode(reader, reader.uint32())); + break; + + case 2: + message.outputs.push(Output.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgMultiSend { + const message = createBaseMsgMultiSend(); + message.inputs = object.inputs?.map(e => Input.fromPartial(e)) || []; + message.outputs = object.outputs?.map(e => Output.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgMultiSendResponse(): MsgMultiSendResponse { + return {}; +} + +export const MsgMultiSendResponse = { + encode(_: MsgMultiSendResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMultiSendResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgMultiSendResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgMultiSendResponse { + const message = createBaseMsgMultiSendResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/abci/v1beta1/abci.ts b/packages/codegen/src/cosmos/base/abci/v1beta1/abci.ts new file mode 100644 index 00000000..cdf74114 --- /dev/null +++ b/packages/codegen/src/cosmos/base/abci/v1beta1/abci.ts @@ -0,0 +1,1020 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Event, EventSDKType } from "../../../../tendermint/abci/types"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * TxResponse defines a structure containing relevant tx data and metadata. The + * tags are stringified and the log is JSON decoded. + */ + +export interface TxResponse { + /** The block height */ + height: Long; + /** The transaction hash. */ + + txhash: string; + /** Namespace for the Code */ + + codespace: string; + /** Response code. */ + + code: number; + /** Result bytes, if any. */ + + data: string; + /** + * The output of the application's logger (raw string). May be + * non-deterministic. + */ + + rawLog: string; + /** The output of the application's logger (typed). May be non-deterministic. */ + + logs: ABCIMessageLog[]; + /** Additional information. May be non-deterministic. */ + + info: string; + /** Amount of gas requested for transaction. */ + + gasWanted: Long; + /** Amount of gas consumed by transaction. */ + + gasUsed: Long; + /** The request transaction bytes. */ + + tx?: Any; + /** + * Time of the previous block. For heights > 1, it's the weighted median of + * the timestamps of the valid votes in the block.LastCommit. For height == 1, + * it's genesis time. + */ + + timestamp: string; + /** + * Events defines all the events emitted by processing a transaction. Note, + * these events include those emitted by processing all the messages and those + * emitted from the ante handler. Whereas Logs contains the events, with + * additional metadata, emitted only by processing the messages. 
+ * + * Since: cosmos-sdk 0.42.11, 0.44.5, 0.45 + */ + + events: Event[]; +} +/** + * TxResponse defines a structure containing relevant tx data and metadata. The + * tags are stringified and the log is JSON decoded. + */ + +export interface TxResponseSDKType { + height: Long; + txhash: string; + codespace: string; + code: number; + data: string; + raw_log: string; + logs: ABCIMessageLogSDKType[]; + info: string; + gas_wanted: Long; + gas_used: Long; + tx?: AnySDKType; + timestamp: string; + events: EventSDKType[]; +} +/** ABCIMessageLog defines a structure containing an indexed tx ABCI message log. */ + +export interface ABCIMessageLog { + msgIndex: number; + log: string; + /** + * Events contains a slice of Event objects that were emitted during some + * execution. + */ + + events: StringEvent[]; +} +/** ABCIMessageLog defines a structure containing an indexed tx ABCI message log. */ + +export interface ABCIMessageLogSDKType { + msg_index: number; + log: string; + events: StringEventSDKType[]; +} +/** + * StringEvent defines en Event object wrapper where all the attributes + * contain key/value pairs that are strings instead of raw bytes. + */ + +export interface StringEvent { + type: string; + attributes: Attribute[]; +} +/** + * StringEvent defines en Event object wrapper where all the attributes + * contain key/value pairs that are strings instead of raw bytes. + */ + +export interface StringEventSDKType { + type: string; + attributes: AttributeSDKType[]; +} +/** + * Attribute defines an attribute wrapper where the key and value are + * strings instead of raw bytes. + */ + +export interface Attribute { + key: string; + value: string; +} +/** + * Attribute defines an attribute wrapper where the key and value are + * strings instead of raw bytes. + */ + +export interface AttributeSDKType { + key: string; + value: string; +} +/** GasInfo defines tx execution gas context. */ + +export interface GasInfo { + /** GasWanted is the maximum units of work we allow this tx to perform. */ + gasWanted: Long; + /** GasUsed is the amount of gas actually consumed. */ + + gasUsed: Long; +} +/** GasInfo defines tx execution gas context. */ + +export interface GasInfoSDKType { + gas_wanted: Long; + gas_used: Long; +} +/** Result is the union of ResponseFormat and ResponseCheckTx. */ + +export interface Result { + /** + * Data is any data returned from message or handler execution. It MUST be + * length prefixed in order to separate data from multiple message executions. + * Deprecated. This field is still populated, but prefer msg_response instead + * because it also contains the Msg response typeURL. + */ + + /** @deprecated */ + data: Uint8Array; + /** Log contains the log information from message or handler execution. */ + + log: string; + /** + * Events contains a slice of Event objects that were emitted during message + * or handler execution. + */ + + events: Event[]; + /** + * msg_responses contains the Msg handler responses type packed in Anys. + * + * Since: cosmos-sdk 0.46 + */ + + msgResponses: Any[]; +} +/** Result is the union of ResponseFormat and ResponseCheckTx. */ + +export interface ResultSDKType { + /** @deprecated */ + data: Uint8Array; + log: string; + events: EventSDKType[]; + msg_responses: AnySDKType[]; +} +/** + * SimulationResponse defines the response generated when a transaction is + * successfully simulated. 
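GasInfo and Result come back together in SimulationResponse. A short sketch of pulling the gas figures and log out of a decoded simulation result, with the import path and simBytes transport left as placeholders:

import { SimulationResponse } from "./abci"; // illustrative path

declare const simBytes: Uint8Array; // raw simulate-response bytes from the application's transport
const sim = SimulationResponse.decode(simBytes);

const gasUsed = sim.gasInfo?.gasUsed;     // Long, per the GasInfo declaration above
const gasWanted = sim.gasInfo?.gasWanted;
console.log(`gas: ${gasUsed?.toString() ?? "n/a"} / ${gasWanted?.toString() ?? "n/a"}`, sim.result?.log);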
+ */ + +export interface SimulationResponse { + gasInfo?: GasInfo; + result?: Result; +} +/** + * SimulationResponse defines the response generated when a transaction is + * successfully simulated. + */ + +export interface SimulationResponseSDKType { + gas_info?: GasInfoSDKType; + result?: ResultSDKType; +} +/** + * MsgData defines the data returned in a Result object during message + * execution. + */ + +/** @deprecated */ + +export interface MsgData { + msgType: string; + data: Uint8Array; +} +/** + * MsgData defines the data returned in a Result object during message + * execution. + */ + +/** @deprecated */ + +export interface MsgDataSDKType { + msg_type: string; + data: Uint8Array; +} +/** + * TxMsgData defines a list of MsgData. A transaction will have a MsgData object + * for each message. + */ + +export interface TxMsgData { + /** data field is deprecated and not populated. */ + + /** @deprecated */ + data: MsgData[]; + /** + * msg_responses contains the Msg handler responses packed into Anys. + * + * Since: cosmos-sdk 0.46 + */ + + msgResponses: Any[]; +} +/** + * TxMsgData defines a list of MsgData. A transaction will have a MsgData object + * for each message. + */ + +export interface TxMsgDataSDKType { + /** @deprecated */ + data: MsgDataSDKType[]; + msg_responses: AnySDKType[]; +} +/** SearchTxsResult defines a structure for querying txs pageable */ + +export interface SearchTxsResult { + /** Count of all txs */ + totalCount: Long; + /** Count of txs in current page */ + + count: Long; + /** Index of current page, start from 1 */ + + pageNumber: Long; + /** Count of total pages */ + + pageTotal: Long; + /** Max count txs per page */ + + limit: Long; + /** List of txs in current page */ + + txs: TxResponse[]; +} +/** SearchTxsResult defines a structure for querying txs pageable */ + +export interface SearchTxsResultSDKType { + total_count: Long; + count: Long; + page_number: Long; + page_total: Long; + limit: Long; + txs: TxResponseSDKType[]; +} + +function createBaseTxResponse(): TxResponse { + return { + height: Long.ZERO, + txhash: "", + codespace: "", + code: 0, + data: "", + rawLog: "", + logs: [], + info: "", + gasWanted: Long.ZERO, + gasUsed: Long.ZERO, + tx: undefined, + timestamp: "", + events: [] + }; +} + +export const TxResponse = { + encode(message: TxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.txhash !== "") { + writer.uint32(18).string(message.txhash); + } + + if (message.codespace !== "") { + writer.uint32(26).string(message.codespace); + } + + if (message.code !== 0) { + writer.uint32(32).uint32(message.code); + } + + if (message.data !== "") { + writer.uint32(42).string(message.data); + } + + if (message.rawLog !== "") { + writer.uint32(50).string(message.rawLog); + } + + for (const v of message.logs) { + ABCIMessageLog.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.info !== "") { + writer.uint32(66).string(message.info); + } + + if (!message.gasWanted.isZero()) { + writer.uint32(72).int64(message.gasWanted); + } + + if (!message.gasUsed.isZero()) { + writer.uint32(80).int64(message.gasUsed); + } + + if (message.tx !== undefined) { + Any.encode(message.tx, writer.uint32(90).fork()).ldelim(); + } + + if (message.timestamp !== "") { + writer.uint32(98).string(message.timestamp); + } + + for (const v of message.events) { + Event.encode(v!, writer.uint32(106).fork()).ldelim(); + } + + return writer; + }, + + decode(input: 
_m0.Reader | Uint8Array, length?: number): TxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.txhash = reader.string(); + break; + + case 3: + message.codespace = reader.string(); + break; + + case 4: + message.code = reader.uint32(); + break; + + case 5: + message.data = reader.string(); + break; + + case 6: + message.rawLog = reader.string(); + break; + + case 7: + message.logs.push(ABCIMessageLog.decode(reader, reader.uint32())); + break; + + case 8: + message.info = reader.string(); + break; + + case 9: + message.gasWanted = (reader.int64() as Long); + break; + + case 10: + message.gasUsed = (reader.int64() as Long); + break; + + case 11: + message.tx = Any.decode(reader, reader.uint32()); + break; + + case 12: + message.timestamp = reader.string(); + break; + + case 13: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxResponse { + const message = createBaseTxResponse(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.txhash = object.txhash ?? ""; + message.codespace = object.codespace ?? ""; + message.code = object.code ?? 0; + message.data = object.data ?? ""; + message.rawLog = object.rawLog ?? ""; + message.logs = object.logs?.map(e => ABCIMessageLog.fromPartial(e)) || []; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted !== undefined && object.gasWanted !== null ? Long.fromValue(object.gasWanted) : Long.ZERO; + message.gasUsed = object.gasUsed !== undefined && object.gasUsed !== null ? Long.fromValue(object.gasUsed) : Long.ZERO; + message.tx = object.tx !== undefined && object.tx !== null ? Any.fromPartial(object.tx) : undefined; + message.timestamp = object.timestamp ?? ""; + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseABCIMessageLog(): ABCIMessageLog { + return { + msgIndex: 0, + log: "", + events: [] + }; +} + +export const ABCIMessageLog = { + encode(message: ABCIMessageLog, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgIndex !== 0) { + writer.uint32(8).uint32(message.msgIndex); + } + + if (message.log !== "") { + writer.uint32(18).string(message.log); + } + + for (const v of message.events) { + StringEvent.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ABCIMessageLog { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseABCIMessageLog(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.msgIndex = reader.uint32(); + break; + + case 2: + message.log = reader.string(); + break; + + case 3: + message.events.push(StringEvent.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ABCIMessageLog { + const message = createBaseABCIMessageLog(); + message.msgIndex = object.msgIndex ?? 0; + message.log = object.log ?? ""; + message.events = object.events?.map(e => StringEvent.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseStringEvent(): StringEvent { + return { + type: "", + attributes: [] + }; +} + +export const StringEvent = { + encode(message: StringEvent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + + for (const v of message.attributes) { + Attribute.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StringEvent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStringEvent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + + case 2: + message.attributes.push(Attribute.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StringEvent { + const message = createBaseStringEvent(); + message.type = object.type ?? ""; + message.attributes = object.attributes?.map(e => Attribute.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAttribute(): Attribute { + return { + key: "", + value: "" + }; +} + +export const Attribute = { + encode(message: Attribute, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Attribute { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAttribute(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + + case 2: + message.value = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Attribute { + const message = createBaseAttribute(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + } + +}; + +function createBaseGasInfo(): GasInfo { + return { + gasWanted: Long.UZERO, + gasUsed: Long.UZERO + }; +} + +export const GasInfo = { + encode(message: GasInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.gasWanted.isZero()) { + writer.uint32(8).uint64(message.gasWanted); + } + + if (!message.gasUsed.isZero()) { + writer.uint32(16).uint64(message.gasUsed); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GasInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGasInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.gasWanted = (reader.uint64() as Long); + break; + + case 2: + message.gasUsed = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GasInfo { + const message = createBaseGasInfo(); + message.gasWanted = object.gasWanted !== undefined && object.gasWanted !== null ? Long.fromValue(object.gasWanted) : Long.UZERO; + message.gasUsed = object.gasUsed !== undefined && object.gasUsed !== null ? Long.fromValue(object.gasUsed) : Long.UZERO; + return message; + } + +}; + +function createBaseResult(): Result { + return { + data: new Uint8Array(), + log: "", + events: [], + msgResponses: [] + }; +} + +export const Result = { + encode(message: Result, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + if (message.log !== "") { + writer.uint32(18).string(message.log); + } + + for (const v of message.events) { + Event.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.msgResponses) { + Any.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Result { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + case 2: + message.log = reader.string(); + break; + + case 3: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + case 4: + message.msgResponses.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Result { + const message = createBaseResult(); + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? 
""; + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + message.msgResponses = object.msgResponses?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSimulationResponse(): SimulationResponse { + return { + gasInfo: undefined, + result: undefined + }; +} + +export const SimulationResponse = { + encode(message: SimulationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.gasInfo !== undefined) { + GasInfo.encode(message.gasInfo, writer.uint32(10).fork()).ldelim(); + } + + if (message.result !== undefined) { + Result.encode(message.result, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimulationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.gasInfo = GasInfo.decode(reader, reader.uint32()); + break; + + case 2: + message.result = Result.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SimulationResponse { + const message = createBaseSimulationResponse(); + message.gasInfo = object.gasInfo !== undefined && object.gasInfo !== null ? GasInfo.fromPartial(object.gasInfo) : undefined; + message.result = object.result !== undefined && object.result !== null ? Result.fromPartial(object.result) : undefined; + return message; + } + +}; + +function createBaseMsgData(): MsgData { + return { + msgType: "", + data: new Uint8Array() + }; +} + +export const MsgData = { + encode(message: MsgData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgType !== "") { + writer.uint32(10).string(message.msgType); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.msgType = reader.string(); + break; + + case 2: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgData { + const message = createBaseMsgData(); + message.msgType = object.msgType ?? ""; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseTxMsgData(): TxMsgData { + return { + data: [], + msgResponses: [] + }; +} + +export const TxMsgData = { + encode(message: TxMsgData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.data) { + MsgData.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.msgResponses) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxMsgData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxMsgData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data.push(MsgData.decode(reader, reader.uint32())); + break; + + case 2: + message.msgResponses.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxMsgData { + const message = createBaseTxMsgData(); + message.data = object.data?.map(e => MsgData.fromPartial(e)) || []; + message.msgResponses = object.msgResponses?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSearchTxsResult(): SearchTxsResult { + return { + totalCount: Long.UZERO, + count: Long.UZERO, + pageNumber: Long.UZERO, + pageTotal: Long.UZERO, + limit: Long.UZERO, + txs: [] + }; +} + +export const SearchTxsResult = { + encode(message: SearchTxsResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.totalCount.isZero()) { + writer.uint32(8).uint64(message.totalCount); + } + + if (!message.count.isZero()) { + writer.uint32(16).uint64(message.count); + } + + if (!message.pageNumber.isZero()) { + writer.uint32(24).uint64(message.pageNumber); + } + + if (!message.pageTotal.isZero()) { + writer.uint32(32).uint64(message.pageTotal); + } + + if (!message.limit.isZero()) { + writer.uint32(40).uint64(message.limit); + } + + for (const v of message.txs) { + TxResponse.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SearchTxsResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSearchTxsResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.totalCount = (reader.uint64() as Long); + break; + + case 2: + message.count = (reader.uint64() as Long); + break; + + case 3: + message.pageNumber = (reader.uint64() as Long); + break; + + case 4: + message.pageTotal = (reader.uint64() as Long); + break; + + case 5: + message.limit = (reader.uint64() as Long); + break; + + case 6: + message.txs.push(TxResponse.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SearchTxsResult { + const message = createBaseSearchTxsResult(); + message.totalCount = object.totalCount !== undefined && object.totalCount !== null ? Long.fromValue(object.totalCount) : Long.UZERO; + message.count = object.count !== undefined && object.count !== null ? Long.fromValue(object.count) : Long.UZERO; + message.pageNumber = object.pageNumber !== undefined && object.pageNumber !== null ? Long.fromValue(object.pageNumber) : Long.UZERO; + message.pageTotal = object.pageTotal !== undefined && object.pageTotal !== null ? Long.fromValue(object.pageTotal) : Long.UZERO; + message.limit = object.limit !== undefined && object.limit !== null ? 
Long.fromValue(object.limit) : Long.UZERO; + message.txs = object.txs?.map(e => TxResponse.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/kv/v1beta1/kv.ts b/packages/codegen/src/cosmos/base/kv/v1beta1/kv.ts new file mode 100644 index 00000000..8ce74b6f --- /dev/null +++ b/packages/codegen/src/cosmos/base/kv/v1beta1/kv.ts @@ -0,0 +1,124 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** Pairs defines a repeated slice of Pair objects. */ + +export interface Pairs { + pairs: Pair[]; +} +/** Pairs defines a repeated slice of Pair objects. */ + +export interface PairsSDKType { + pairs: PairSDKType[]; +} +/** Pair defines a key/value bytes tuple. */ + +export interface Pair { + key: Uint8Array; + value: Uint8Array; +} +/** Pair defines a key/value bytes tuple. */ + +export interface PairSDKType { + key: Uint8Array; + value: Uint8Array; +} + +function createBasePairs(): Pairs { + return { + pairs: [] + }; +} + +export const Pairs = { + encode(message: Pairs, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pairs) { + Pair.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Pairs { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePairs(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pairs.push(Pair.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Pairs { + const message = createBasePairs(); + message.pairs = object.pairs?.map(e => Pair.fromPartial(e)) || []; + return message; + } + +}; + +function createBasePair(): Pair { + return { + key: new Uint8Array(), + value: new Uint8Array() + }; +} + +export const Pair = { + encode(message: Pair, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Pair { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePair(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Pair { + const message = createBasePair(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? 
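Pair and Pairs from kv.ts are plain byte tuples, so a short encode/decode sketch is enough to show their use; the import path is assumed from the src layout and TextEncoder is only used here to make the byte input readable:

import { Pairs } from "./cosmos/base/kv/v1beta1/kv";

const enc = new TextEncoder();

// Keys and values are raw bytes on the wire.
const pairs = Pairs.fromPartial({
  pairs: [{ key: enc.encode("balance"), value: enc.encode("1000uatom") }],
});

const wire = Pairs.encode(pairs).finish();
const decoded = Pairs.decode(wire);
console.log(new TextDecoder().decode(decoded.pairs[0].key)); // "balance"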
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/query/v1beta1/pagination.ts b/packages/codegen/src/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 00000000..193d96a4 --- /dev/null +++ b/packages/codegen/src/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,243 @@ +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ + +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + + offset: Long; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + + limit: Long; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + + reverse: boolean; +} +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ + +export interface PageRequestSDKType { + key: Uint8Array; + offset: Long; + limit: Long; + count_total: boolean; + reverse: boolean; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + + total: Long; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + +export interface PageResponseSDKType { + next_key: Uint8Array; + total: Long; +} + +function createBasePageRequest(): PageRequest { + return { + key: new Uint8Array(), + offset: Long.UZERO, + limit: Long.UZERO, + countTotal: false, + reverse: false + }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (!message.offset.isZero()) { + writer.uint32(16).uint64(message.offset); + } + + if (!message.limit.isZero()) { + writer.uint32(24).uint64(message.limit); + } + + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.offset = (reader.uint64() as Long); + break; + + case 3: + message.limit = (reader.uint64() as Long); + break; + + case 4: + message.countTotal = reader.bool(); + break; + + case 5: + message.reverse = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset !== undefined && object.offset !== null ? Long.fromValue(object.offset) : Long.UZERO; + message.limit = object.limit !== undefined && object.limit !== null ? Long.fromValue(object.limit) : Long.UZERO; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + } + +}; + +function createBasePageResponse(): PageResponse { + return { + nextKey: new Uint8Array(), + total: Long.UZERO + }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + + if (!message.total.isZero()) { + writer.uint32(16).uint64(message.total); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + + case 2: + message.total = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total !== undefined && object.total !== null ? 
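PageRequest and PageResponse work as a pair: nextKey from one response feeds the key of the next request, and only one of key/offset should be set at a time. A sketch of building a limit-bound first request and following nextKey, where queryAllBalances is a hypothetical query function (not part of this diff) and the import paths are assumptions:

import { Long } from "./helpers";
import { PageRequest } from "./cosmos/base/query/v1beta1/pagination";

// First page: unsigned 64-bit limit, and ask the node to count the total.
let pagination = PageRequest.fromPartial({
  limit: Long.fromNumber(100, true),
  countTotal: true,
});

// Hypothetical follow-up request keyed by the previous response:
// const res = await queryAllBalances({ address, pagination });
// if (res.pagination && res.pagination.nextKey.length > 0) {
//   // Start a fresh request keyed by nextKey; leave offset unset.
//   pagination = PageRequest.fromPartial({ key: res.pagination.nextKey, limit: Long.fromNumber(100, true) });
// }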
Long.fromValue(object.total) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/reflection/v1beta1/reflection.ts b/packages/codegen/src/cosmos/base/reflection/v1beta1/reflection.ts new file mode 100644 index 00000000..17e84892 --- /dev/null +++ b/packages/codegen/src/cosmos/base/reflection/v1beta1/reflection.ts @@ -0,0 +1,221 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** ListAllInterfacesRequest is the request type of the ListAllInterfaces RPC. */ + +export interface ListAllInterfacesRequest {} +/** ListAllInterfacesRequest is the request type of the ListAllInterfaces RPC. */ + +export interface ListAllInterfacesRequestSDKType {} +/** ListAllInterfacesResponse is the response type of the ListAllInterfaces RPC. */ + +export interface ListAllInterfacesResponse { + /** interface_names is an array of all the registered interfaces. */ + interfaceNames: string[]; +} +/** ListAllInterfacesResponse is the response type of the ListAllInterfaces RPC. */ + +export interface ListAllInterfacesResponseSDKType { + interface_names: string[]; +} +/** + * ListImplementationsRequest is the request type of the ListImplementations + * RPC. + */ + +export interface ListImplementationsRequest { + /** interface_name defines the interface to query the implementations for. */ + interfaceName: string; +} +/** + * ListImplementationsRequest is the request type of the ListImplementations + * RPC. + */ + +export interface ListImplementationsRequestSDKType { + interface_name: string; +} +/** + * ListImplementationsResponse is the response type of the ListImplementations + * RPC. + */ + +export interface ListImplementationsResponse { + implementationMessageNames: string[]; +} +/** + * ListImplementationsResponse is the response type of the ListImplementations + * RPC. + */ + +export interface ListImplementationsResponseSDKType { + implementation_message_names: string[]; +} + +function createBaseListAllInterfacesRequest(): ListAllInterfacesRequest { + return {}; +} + +export const ListAllInterfacesRequest = { + encode(_: ListAllInterfacesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListAllInterfacesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListAllInterfacesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): ListAllInterfacesRequest { + const message = createBaseListAllInterfacesRequest(); + return message; + } + +}; + +function createBaseListAllInterfacesResponse(): ListAllInterfacesResponse { + return { + interfaceNames: [] + }; +} + +export const ListAllInterfacesResponse = { + encode(message: ListAllInterfacesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.interfaceNames) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListAllInterfacesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseListAllInterfacesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.interfaceNames.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ListAllInterfacesResponse { + const message = createBaseListAllInterfacesResponse(); + message.interfaceNames = object.interfaceNames?.map(e => e) || []; + return message; + } + +}; + +function createBaseListImplementationsRequest(): ListImplementationsRequest { + return { + interfaceName: "" + }; +} + +export const ListImplementationsRequest = { + encode(message: ListImplementationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.interfaceName !== "") { + writer.uint32(10).string(message.interfaceName); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListImplementationsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListImplementationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.interfaceName = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ListImplementationsRequest { + const message = createBaseListImplementationsRequest(); + message.interfaceName = object.interfaceName ?? ""; + return message; + } + +}; + +function createBaseListImplementationsResponse(): ListImplementationsResponse { + return { + implementationMessageNames: [] + }; +} + +export const ListImplementationsResponse = { + encode(message: ListImplementationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.implementationMessageNames) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListImplementationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListImplementationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.implementationMessageNames.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ListImplementationsResponse { + const message = createBaseListImplementationsResponse(); + message.implementationMessageNames = object.implementationMessageNames?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/reflection/v2alpha1/reflection.ts b/packages/codegen/src/cosmos/base/reflection/v2alpha1/reflection.ts new file mode 100644 index 00000000..0f17dd05 --- /dev/null +++ b/packages/codegen/src/cosmos/base/reflection/v2alpha1/reflection.ts @@ -0,0 +1,1630 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** AppDescriptor describes a cosmos-sdk based application */ + +export interface AppDescriptor { + /** + * AuthnDescriptor provides information on how to authenticate transactions on the application + * NOTE: experimental and subject to change in future releases. 
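The v1beta1 reflection types are simple request/response pairs: encode produces the bytes to hand to whatever transport the client uses, and decode turns the raw response back into a typed object. A sketch, with the interface name as a placeholder and the import path assumed:

import { ListImplementationsRequest, ListImplementationsResponse } from "./cosmos/base/reflection/v1beta1/reflection";

// Placeholder interface name; substitute the registered interface you care about.
const req = ListImplementationsRequest.fromPartial({ interfaceName: "cosmos.tx.v1beta1.Msg" });
const reqBytes = ListImplementationsRequest.encode(req).finish();

// Raw bytes returned by the transport layer (assumed to exist here).
declare const respBytes: Uint8Array;
const resp = ListImplementationsResponse.decode(respBytes);
console.log(resp.implementationMessageNames);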
+ */ + authn?: AuthnDescriptor; + /** chain provides the chain descriptor */ + + chain?: ChainDescriptor; + /** codec provides metadata information regarding codec related types */ + + codec?: CodecDescriptor; + /** configuration provides metadata information regarding the sdk.Config type */ + + configuration?: ConfigurationDescriptor; + /** query_services provides metadata information regarding the available queriable endpoints */ + + queryServices?: QueryServicesDescriptor; + /** tx provides metadata information regarding how to send transactions to the given application */ + + tx?: TxDescriptor; +} +/** AppDescriptor describes a cosmos-sdk based application */ + +export interface AppDescriptorSDKType { + authn?: AuthnDescriptorSDKType; + chain?: ChainDescriptorSDKType; + codec?: CodecDescriptorSDKType; + configuration?: ConfigurationDescriptorSDKType; + query_services?: QueryServicesDescriptorSDKType; + tx?: TxDescriptorSDKType; +} +/** TxDescriptor describes the accepted transaction type */ + +export interface TxDescriptor { + /** + * fullname is the protobuf fullname of the raw transaction type (for instance the tx.Tx type) + * it is not meant to support polymorphism of transaction types, it is supposed to be used by + * reflection clients to understand if they can handle a specific transaction type in an application. + */ + fullname: string; + /** msgs lists the accepted application messages (sdk.Msg) */ + + msgs: MsgDescriptor[]; +} +/** TxDescriptor describes the accepted transaction type */ + +export interface TxDescriptorSDKType { + fullname: string; + msgs: MsgDescriptorSDKType[]; +} +/** + * AuthnDescriptor provides information on how to sign transactions without relying + * on the online RPCs GetTxMetadata and CombineUnsignedTxAndSignatures + */ + +export interface AuthnDescriptor { + /** sign_modes defines the supported signature algorithm */ + signModes: SigningModeDescriptor[]; +} +/** + * AuthnDescriptor provides information on how to sign transactions without relying + * on the online RPCs GetTxMetadata and CombineUnsignedTxAndSignatures + */ + +export interface AuthnDescriptorSDKType { + sign_modes: SigningModeDescriptorSDKType[]; +} +/** + * SigningModeDescriptor provides information on a signing flow of the application + * NOTE(fdymylja): here we could go as far as providing an entire flow on how + * to sign a message given a SigningModeDescriptor, but it's better to think about + * this another time + */ + +export interface SigningModeDescriptor { + /** name defines the unique name of the signing mode */ + name: string; + /** number is the unique int32 identifier for the sign_mode enum */ + + number: number; + /** + * authn_info_provider_method_fullname defines the fullname of the method to call to get + * the metadata required to authenticate using the provided sign_modes + */ + + authnInfoProviderMethodFullname: string; +} +/** + * SigningModeDescriptor provides information on a signing flow of the application + * NOTE(fdymylja): here we could go as far as providing an entire flow on how + * to sign a message given a SigningModeDescriptor, but it's better to think about + * this another time + */ + +export interface SigningModeDescriptorSDKType { + name: string; + number: number; + authn_info_provider_method_fullname: string; +} +/** ChainDescriptor describes chain information of the application */ + +export interface ChainDescriptor { + /** id is the chain id */ + id: string; +} +/** ChainDescriptor describes chain information of the application */ + +export 
interface ChainDescriptorSDKType { + id: string; +} +/** CodecDescriptor describes the registered interfaces and provides metadata information on the types */ + +export interface CodecDescriptor { + /** interfaces is a list of the registerted interfaces descriptors */ + interfaces: InterfaceDescriptor[]; +} +/** CodecDescriptor describes the registered interfaces and provides metadata information on the types */ + +export interface CodecDescriptorSDKType { + interfaces: InterfaceDescriptorSDKType[]; +} +/** InterfaceDescriptor describes the implementation of an interface */ + +export interface InterfaceDescriptor { + /** fullname is the name of the interface */ + fullname: string; + /** + * interface_accepting_messages contains information regarding the proto messages which contain the interface as + * google.protobuf.Any field + */ + + interfaceAcceptingMessages: InterfaceAcceptingMessageDescriptor[]; + /** interface_implementers is a list of the descriptors of the interface implementers */ + + interfaceImplementers: InterfaceImplementerDescriptor[]; +} +/** InterfaceDescriptor describes the implementation of an interface */ + +export interface InterfaceDescriptorSDKType { + fullname: string; + interface_accepting_messages: InterfaceAcceptingMessageDescriptorSDKType[]; + interface_implementers: InterfaceImplementerDescriptorSDKType[]; +} +/** InterfaceImplementerDescriptor describes an interface implementer */ + +export interface InterfaceImplementerDescriptor { + /** fullname is the protobuf queryable name of the interface implementer */ + fullname: string; + /** + * type_url defines the type URL used when marshalling the type as any + * this is required so we can provide type safe google.protobuf.Any marshalling and + * unmarshalling, making sure that we don't accept just 'any' type + * in our interface fields + */ + + typeUrl: string; +} +/** InterfaceImplementerDescriptor describes an interface implementer */ + +export interface InterfaceImplementerDescriptorSDKType { + fullname: string; + type_url: string; +} +/** + * InterfaceAcceptingMessageDescriptor describes a protobuf message which contains + * an interface represented as a google.protobuf.Any + */ + +export interface InterfaceAcceptingMessageDescriptor { + /** fullname is the protobuf fullname of the type containing the interface */ + fullname: string; + /** + * field_descriptor_names is a list of the protobuf name (not fullname) of the field + * which contains the interface as google.protobuf.Any (the interface is the same, but + * it can be in multiple fields of the same proto message) + */ + + fieldDescriptorNames: string[]; +} +/** + * InterfaceAcceptingMessageDescriptor describes a protobuf message which contains + * an interface represented as a google.protobuf.Any + */ + +export interface InterfaceAcceptingMessageDescriptorSDKType { + fullname: string; + field_descriptor_names: string[]; +} +/** ConfigurationDescriptor contains metadata information on the sdk.Config */ + +export interface ConfigurationDescriptor { + /** bech32_account_address_prefix is the account address prefix */ + bech32AccountAddressPrefix: string; +} +/** ConfigurationDescriptor contains metadata information on the sdk.Config */ + +export interface ConfigurationDescriptorSDKType { + bech32_account_address_prefix: string; +} +/** MsgDescriptor describes a cosmos-sdk message that can be delivered with a transaction */ + +export interface MsgDescriptor { + /** msg_type_url contains the TypeURL of a sdk.Msg. 
*/ + msgTypeUrl: string; +} +/** MsgDescriptor describes a cosmos-sdk message that can be delivered with a transaction */ + +export interface MsgDescriptorSDKType { + msg_type_url: string; +} +/** GetAuthnDescriptorRequest is the request used for the GetAuthnDescriptor RPC */ + +export interface GetAuthnDescriptorRequest {} +/** GetAuthnDescriptorRequest is the request used for the GetAuthnDescriptor RPC */ + +export interface GetAuthnDescriptorRequestSDKType {} +/** GetAuthnDescriptorResponse is the response returned by the GetAuthnDescriptor RPC */ + +export interface GetAuthnDescriptorResponse { + /** authn describes how to authenticate to the application when sending transactions */ + authn?: AuthnDescriptor; +} +/** GetAuthnDescriptorResponse is the response returned by the GetAuthnDescriptor RPC */ + +export interface GetAuthnDescriptorResponseSDKType { + authn?: AuthnDescriptorSDKType; +} +/** GetChainDescriptorRequest is the request used for the GetChainDescriptor RPC */ + +export interface GetChainDescriptorRequest {} +/** GetChainDescriptorRequest is the request used for the GetChainDescriptor RPC */ + +export interface GetChainDescriptorRequestSDKType {} +/** GetChainDescriptorResponse is the response returned by the GetChainDescriptor RPC */ + +export interface GetChainDescriptorResponse { + /** chain describes application chain information */ + chain?: ChainDescriptor; +} +/** GetChainDescriptorResponse is the response returned by the GetChainDescriptor RPC */ + +export interface GetChainDescriptorResponseSDKType { + chain?: ChainDescriptorSDKType; +} +/** GetCodecDescriptorRequest is the request used for the GetCodecDescriptor RPC */ + +export interface GetCodecDescriptorRequest {} +/** GetCodecDescriptorRequest is the request used for the GetCodecDescriptor RPC */ + +export interface GetCodecDescriptorRequestSDKType {} +/** GetCodecDescriptorResponse is the response returned by the GetCodecDescriptor RPC */ + +export interface GetCodecDescriptorResponse { + /** codec describes the application codec such as registered interfaces and implementations */ + codec?: CodecDescriptor; +} +/** GetCodecDescriptorResponse is the response returned by the GetCodecDescriptor RPC */ + +export interface GetCodecDescriptorResponseSDKType { + codec?: CodecDescriptorSDKType; +} +/** GetConfigurationDescriptorRequest is the request used for the GetConfigurationDescriptor RPC */ + +export interface GetConfigurationDescriptorRequest {} +/** GetConfigurationDescriptorRequest is the request used for the GetConfigurationDescriptor RPC */ + +export interface GetConfigurationDescriptorRequestSDKType {} +/** GetConfigurationDescriptorResponse is the response returned by the GetConfigurationDescriptor RPC */ + +export interface GetConfigurationDescriptorResponse { + /** config describes the application's sdk.Config */ + config?: ConfigurationDescriptor; +} +/** GetConfigurationDescriptorResponse is the response returned by the GetConfigurationDescriptor RPC */ + +export interface GetConfigurationDescriptorResponseSDKType { + config?: ConfigurationDescriptorSDKType; +} +/** GetQueryServicesDescriptorRequest is the request used for the GetQueryServicesDescriptor RPC */ + +export interface GetQueryServicesDescriptorRequest {} +/** GetQueryServicesDescriptorRequest is the request used for the GetQueryServicesDescriptor RPC */ + +export interface GetQueryServicesDescriptorRequestSDKType {} +/** GetQueryServicesDescriptorResponse is the response returned by the GetQueryServicesDescriptor RPC */ + +export 
interface GetQueryServicesDescriptorResponse { + /** queries provides information on the available queryable services */ + queries?: QueryServicesDescriptor; +} +/** GetQueryServicesDescriptorResponse is the response returned by the GetQueryServicesDescriptor RPC */ + +export interface GetQueryServicesDescriptorResponseSDKType { + queries?: QueryServicesDescriptorSDKType; +} +/** GetTxDescriptorRequest is the request used for the GetTxDescriptor RPC */ + +export interface GetTxDescriptorRequest {} +/** GetTxDescriptorRequest is the request used for the GetTxDescriptor RPC */ + +export interface GetTxDescriptorRequestSDKType {} +/** GetTxDescriptorResponse is the response returned by the GetTxDescriptor RPC */ + +export interface GetTxDescriptorResponse { + /** + * tx provides information on msgs that can be forwarded to the application + * alongside the accepted transaction protobuf type + */ + tx?: TxDescriptor; +} +/** GetTxDescriptorResponse is the response returned by the GetTxDescriptor RPC */ + +export interface GetTxDescriptorResponseSDKType { + tx?: TxDescriptorSDKType; +} +/** QueryServicesDescriptor contains the list of cosmos-sdk queriable services */ + +export interface QueryServicesDescriptor { + /** query_services is a list of cosmos-sdk QueryServiceDescriptor */ + queryServices: QueryServiceDescriptor[]; +} +/** QueryServicesDescriptor contains the list of cosmos-sdk queriable services */ + +export interface QueryServicesDescriptorSDKType { + query_services: QueryServiceDescriptorSDKType[]; +} +/** QueryServiceDescriptor describes a cosmos-sdk queryable service */ + +export interface QueryServiceDescriptor { + /** fullname is the protobuf fullname of the service descriptor */ + fullname: string; + /** is_module describes if this service is actually exposed by an application's module */ + + isModule: boolean; + /** methods provides a list of query service methods */ + + methods: QueryMethodDescriptor[]; +} +/** QueryServiceDescriptor describes a cosmos-sdk queryable service */ + +export interface QueryServiceDescriptorSDKType { + fullname: string; + is_module: boolean; + methods: QueryMethodDescriptorSDKType[]; +} +/** + * QueryMethodDescriptor describes a queryable method of a query service + * no other info is provided beside method name and tendermint queryable path + * because it would be redundant with the grpc reflection service + */ + +export interface QueryMethodDescriptor { + /** name is the protobuf name (not fullname) of the method */ + name: string; + /** + * full_query_path is the path that can be used to query + * this method via tendermint abci.Query + */ + + fullQueryPath: string; +} +/** + * QueryMethodDescriptor describes a queryable method of a query service + * no other info is provided beside method name and tendermint queryable path + * because it would be redundant with the grpc reflection service + */ + +export interface QueryMethodDescriptorSDKType { + name: string; + full_query_path: string; +} + +function createBaseAppDescriptor(): AppDescriptor { + return { + authn: undefined, + chain: undefined, + codec: undefined, + configuration: undefined, + queryServices: undefined, + tx: undefined + }; +} + +export const AppDescriptor = { + encode(message: AppDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authn !== undefined) { + AuthnDescriptor.encode(message.authn, writer.uint32(10).fork()).ldelim(); + } + + if (message.chain !== undefined) { + ChainDescriptor.encode(message.chain, writer.uint32(18).fork()).ldelim(); + } + 
+ if (message.codec !== undefined) { + CodecDescriptor.encode(message.codec, writer.uint32(26).fork()).ldelim(); + } + + if (message.configuration !== undefined) { + ConfigurationDescriptor.encode(message.configuration, writer.uint32(34).fork()).ldelim(); + } + + if (message.queryServices !== undefined) { + QueryServicesDescriptor.encode(message.queryServices, writer.uint32(42).fork()).ldelim(); + } + + if (message.tx !== undefined) { + TxDescriptor.encode(message.tx, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AppDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAppDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authn = AuthnDescriptor.decode(reader, reader.uint32()); + break; + + case 2: + message.chain = ChainDescriptor.decode(reader, reader.uint32()); + break; + + case 3: + message.codec = CodecDescriptor.decode(reader, reader.uint32()); + break; + + case 4: + message.configuration = ConfigurationDescriptor.decode(reader, reader.uint32()); + break; + + case 5: + message.queryServices = QueryServicesDescriptor.decode(reader, reader.uint32()); + break; + + case 6: + message.tx = TxDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AppDescriptor { + const message = createBaseAppDescriptor(); + message.authn = object.authn !== undefined && object.authn !== null ? AuthnDescriptor.fromPartial(object.authn) : undefined; + message.chain = object.chain !== undefined && object.chain !== null ? ChainDescriptor.fromPartial(object.chain) : undefined; + message.codec = object.codec !== undefined && object.codec !== null ? CodecDescriptor.fromPartial(object.codec) : undefined; + message.configuration = object.configuration !== undefined && object.configuration !== null ? ConfigurationDescriptor.fromPartial(object.configuration) : undefined; + message.queryServices = object.queryServices !== undefined && object.queryServices !== null ? QueryServicesDescriptor.fromPartial(object.queryServices) : undefined; + message.tx = object.tx !== undefined && object.tx !== null ? TxDescriptor.fromPartial(object.tx) : undefined; + return message; + } + +}; + +function createBaseTxDescriptor(): TxDescriptor { + return { + fullname: "", + msgs: [] + }; +} + +export const TxDescriptor = { + encode(message: TxDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fullname !== "") { + writer.uint32(10).string(message.fullname); + } + + for (const v of message.msgs) { + MsgDescriptor.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fullname = reader.string(); + break; + + case 2: + message.msgs.push(MsgDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxDescriptor { + const message = createBaseTxDescriptor(); + message.fullname = object.fullname ?? ""; + message.msgs = object.msgs?.map(e => MsgDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAuthnDescriptor(): AuthnDescriptor { + return { + signModes: [] + }; +} + +export const AuthnDescriptor = { + encode(message: AuthnDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signModes) { + SigningModeDescriptor.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuthnDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthnDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signModes.push(SigningModeDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AuthnDescriptor { + const message = createBaseAuthnDescriptor(); + message.signModes = object.signModes?.map(e => SigningModeDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSigningModeDescriptor(): SigningModeDescriptor { + return { + name: "", + number: 0, + authnInfoProviderMethodFullname: "" + }; +} + +export const SigningModeDescriptor = { + encode(message: SigningModeDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + + if (message.authnInfoProviderMethodFullname !== "") { + writer.uint32(26).string(message.authnInfoProviderMethodFullname); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SigningModeDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSigningModeDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.number = reader.int32(); + break; + + case 3: + message.authnInfoProviderMethodFullname = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SigningModeDescriptor { + const message = createBaseSigningModeDescriptor(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.authnInfoProviderMethodFullname = object.authnInfoProviderMethodFullname ?? 
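fromPartial accepts a deeply partial object and recursively fills nested descriptors, so an AppDescriptor can be assembled piecewise while omitted message fields stay undefined. A small sketch with illustrative values (the import path is an assumption):

import { AppDescriptor } from "./cosmos/base/reflection/v2alpha1/reflection";

const app = AppDescriptor.fromPartial({
  chain: { id: "my-chain-1" },
  authn: {
    signModes: [{ name: "SIGN_MODE_DIRECT", number: 1 }],
  },
});

console.log(app.chain?.id); // "my-chain-1"
console.log(app.authn?.signModes[0].authnInfoProviderMethodFullname); // "" (filled default)
console.log(app.tx); // undefined: fields left out stay undefined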
""; + return message; + } + +}; + +function createBaseChainDescriptor(): ChainDescriptor { + return { + id: "" + }; +} + +export const ChainDescriptor = { + encode(message: ChainDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ChainDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChainDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ChainDescriptor { + const message = createBaseChainDescriptor(); + message.id = object.id ?? ""; + return message; + } + +}; + +function createBaseCodecDescriptor(): CodecDescriptor { + return { + interfaces: [] + }; +} + +export const CodecDescriptor = { + encode(message: CodecDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.interfaces) { + InterfaceDescriptor.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CodecDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCodecDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.interfaces.push(InterfaceDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CodecDescriptor { + const message = createBaseCodecDescriptor(); + message.interfaces = object.interfaces?.map(e => InterfaceDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { + fullname: "", + interfaceAcceptingMessages: [], + interfaceImplementers: [] + }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fullname !== "") { + writer.uint32(10).string(message.fullname); + } + + for (const v of message.interfaceAcceptingMessages) { + InterfaceAcceptingMessageDescriptor.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.interfaceImplementers) { + InterfaceImplementerDescriptor.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fullname = reader.string(); + break; + + case 2: + message.interfaceAcceptingMessages.push(InterfaceAcceptingMessageDescriptor.decode(reader, reader.uint32())); + break; + + case 3: + message.interfaceImplementers.push(InterfaceImplementerDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.fullname = object.fullname ?? ""; + message.interfaceAcceptingMessages = object.interfaceAcceptingMessages?.map(e => InterfaceAcceptingMessageDescriptor.fromPartial(e)) || []; + message.interfaceImplementers = object.interfaceImplementers?.map(e => InterfaceImplementerDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseInterfaceImplementerDescriptor(): InterfaceImplementerDescriptor { + return { + fullname: "", + typeUrl: "" + }; +} + +export const InterfaceImplementerDescriptor = { + encode(message: InterfaceImplementerDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fullname !== "") { + writer.uint32(10).string(message.fullname); + } + + if (message.typeUrl !== "") { + writer.uint32(18).string(message.typeUrl); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceImplementerDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceImplementerDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fullname = reader.string(); + break; + + case 2: + message.typeUrl = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InterfaceImplementerDescriptor { + const message = createBaseInterfaceImplementerDescriptor(); + message.fullname = object.fullname ?? ""; + message.typeUrl = object.typeUrl ?? ""; + return message; + } + +}; + +function createBaseInterfaceAcceptingMessageDescriptor(): InterfaceAcceptingMessageDescriptor { + return { + fullname: "", + fieldDescriptorNames: [] + }; +} + +export const InterfaceAcceptingMessageDescriptor = { + encode(message: InterfaceAcceptingMessageDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fullname !== "") { + writer.uint32(10).string(message.fullname); + } + + for (const v of message.fieldDescriptorNames) { + writer.uint32(18).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceAcceptingMessageDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceAcceptingMessageDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fullname = reader.string(); + break; + + case 2: + message.fieldDescriptorNames.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InterfaceAcceptingMessageDescriptor { + const message = createBaseInterfaceAcceptingMessageDescriptor(); + message.fullname = object.fullname ?? ""; + message.fieldDescriptorNames = object.fieldDescriptorNames?.map(e => e) || []; + return message; + } + +}; + +function createBaseConfigurationDescriptor(): ConfigurationDescriptor { + return { + bech32AccountAddressPrefix: "" + }; +} + +export const ConfigurationDescriptor = { + encode(message: ConfigurationDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bech32AccountAddressPrefix !== "") { + writer.uint32(10).string(message.bech32AccountAddressPrefix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConfigurationDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConfigurationDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bech32AccountAddressPrefix = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConfigurationDescriptor { + const message = createBaseConfigurationDescriptor(); + message.bech32AccountAddressPrefix = object.bech32AccountAddressPrefix ?? ""; + return message; + } + +}; + +function createBaseMsgDescriptor(): MsgDescriptor { + return { + msgTypeUrl: "" + }; +} + +export const MsgDescriptor = { + encode(message: MsgDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgTypeUrl !== "") { + writer.uint32(10).string(message.msgTypeUrl); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.msgTypeUrl = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDescriptor { + const message = createBaseMsgDescriptor(); + message.msgTypeUrl = object.msgTypeUrl ?? ""; + return message; + } + +}; + +function createBaseGetAuthnDescriptorRequest(): GetAuthnDescriptorRequest { + return {}; +} + +export const GetAuthnDescriptorRequest = { + encode(_: GetAuthnDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthnDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetAuthnDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetAuthnDescriptorRequest { + const message = createBaseGetAuthnDescriptorRequest(); + return message; + } + +}; + +function createBaseGetAuthnDescriptorResponse(): GetAuthnDescriptorResponse { + return { + authn: undefined + }; +} + +export const GetAuthnDescriptorResponse = { + encode(message: GetAuthnDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authn !== undefined) { + AuthnDescriptor.encode(message.authn, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetAuthnDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetAuthnDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authn = AuthnDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetAuthnDescriptorResponse { + const message = createBaseGetAuthnDescriptorResponse(); + message.authn = object.authn !== undefined && object.authn !== null ? AuthnDescriptor.fromPartial(object.authn) : undefined; + return message; + } + +}; + +function createBaseGetChainDescriptorRequest(): GetChainDescriptorRequest { + return {}; +} + +export const GetChainDescriptorRequest = { + encode(_: GetChainDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetChainDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetChainDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetChainDescriptorRequest { + const message = createBaseGetChainDescriptorRequest(); + return message; + } + +}; + +function createBaseGetChainDescriptorResponse(): GetChainDescriptorResponse { + return { + chain: undefined + }; +} + +export const GetChainDescriptorResponse = { + encode(message: GetChainDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.chain !== undefined) { + ChainDescriptor.encode(message.chain, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetChainDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
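Decoding the reflection responses follows the same shape everywhere: decode the raw payload, then read the optional descriptor off it. For example, listing the sign modes an application advertises; respBytes stands in for the raw RPC payload and the import path is assumed:

import { GetAuthnDescriptorResponse } from "./cosmos/base/reflection/v2alpha1/reflection";

declare const respBytes: Uint8Array; // raw payload from the GetAuthnDescriptor RPC (assumed)

const { authn } = GetAuthnDescriptorResponse.decode(respBytes);
for (const mode of authn?.signModes ?? []) {
  console.log(`${mode.name} (sign_mode ${mode.number})`);
}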
reader.len : reader.pos + length; + const message = createBaseGetChainDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.chain = ChainDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetChainDescriptorResponse { + const message = createBaseGetChainDescriptorResponse(); + message.chain = object.chain !== undefined && object.chain !== null ? ChainDescriptor.fromPartial(object.chain) : undefined; + return message; + } + +}; + +function createBaseGetCodecDescriptorRequest(): GetCodecDescriptorRequest { + return {}; +} + +export const GetCodecDescriptorRequest = { + encode(_: GetCodecDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetCodecDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetCodecDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetCodecDescriptorRequest { + const message = createBaseGetCodecDescriptorRequest(); + return message; + } + +}; + +function createBaseGetCodecDescriptorResponse(): GetCodecDescriptorResponse { + return { + codec: undefined + }; +} + +export const GetCodecDescriptorResponse = { + encode(message: GetCodecDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.codec !== undefined) { + CodecDescriptor.encode(message.codec, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetCodecDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetCodecDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codec = CodecDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetCodecDescriptorResponse { + const message = createBaseGetCodecDescriptorResponse(); + message.codec = object.codec !== undefined && object.codec !== null ? CodecDescriptor.fromPartial(object.codec) : undefined; + return message; + } + +}; + +function createBaseGetConfigurationDescriptorRequest(): GetConfigurationDescriptorRequest { + return {}; +} + +export const GetConfigurationDescriptorRequest = { + encode(_: GetConfigurationDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetConfigurationDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetConfigurationDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetConfigurationDescriptorRequest { + const message = createBaseGetConfigurationDescriptorRequest(); + return message; + } + +}; + +function createBaseGetConfigurationDescriptorResponse(): GetConfigurationDescriptorResponse { + return { + config: undefined + }; +} + +export const GetConfigurationDescriptorResponse = { + encode(message: GetConfigurationDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.config !== undefined) { + ConfigurationDescriptor.encode(message.config, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetConfigurationDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetConfigurationDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.config = ConfigurationDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetConfigurationDescriptorResponse { + const message = createBaseGetConfigurationDescriptorResponse(); + message.config = object.config !== undefined && object.config !== null ? ConfigurationDescriptor.fromPartial(object.config) : undefined; + return message; + } + +}; + +function createBaseGetQueryServicesDescriptorRequest(): GetQueryServicesDescriptorRequest { + return {}; +} + +export const GetQueryServicesDescriptorRequest = { + encode(_: GetQueryServicesDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetQueryServicesDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetQueryServicesDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetQueryServicesDescriptorRequest { + const message = createBaseGetQueryServicesDescriptorRequest(); + return message; + } + +}; + +function createBaseGetQueryServicesDescriptorResponse(): GetQueryServicesDescriptorResponse { + return { + queries: undefined + }; +} + +export const GetQueryServicesDescriptorResponse = { + encode(message: GetQueryServicesDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.queries !== undefined) { + QueryServicesDescriptor.encode(message.queries, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetQueryServicesDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetQueryServicesDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.queries = QueryServicesDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetQueryServicesDescriptorResponse { + const message = createBaseGetQueryServicesDescriptorResponse(); + message.queries = object.queries !== undefined && object.queries !== null ? QueryServicesDescriptor.fromPartial(object.queries) : undefined; + return message; + } + +}; + +function createBaseGetTxDescriptorRequest(): GetTxDescriptorRequest { + return {}; +} + +export const GetTxDescriptorRequest = { + encode(_: GetTxDescriptorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxDescriptorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxDescriptorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetTxDescriptorRequest { + const message = createBaseGetTxDescriptorRequest(); + return message; + } + +}; + +function createBaseGetTxDescriptorResponse(): GetTxDescriptorResponse { + return { + tx: undefined + }; +} + +export const GetTxDescriptorResponse = { + encode(message: GetTxDescriptorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + TxDescriptor.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxDescriptorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxDescriptorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tx = TxDescriptor.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetTxDescriptorResponse { + const message = createBaseGetTxDescriptorResponse(); + message.tx = object.tx !== undefined && object.tx !== null ? TxDescriptor.fromPartial(object.tx) : undefined; + return message; + } + +}; + +function createBaseQueryServicesDescriptor(): QueryServicesDescriptor { + return { + queryServices: [] + }; +} + +export const QueryServicesDescriptor = { + encode(message: QueryServicesDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.queryServices) { + QueryServiceDescriptor.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryServicesDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryServicesDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.queryServices.push(QueryServiceDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryServicesDescriptor { + const message = createBaseQueryServicesDescriptor(); + message.queryServices = object.queryServices?.map(e => QueryServiceDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseQueryServiceDescriptor(): QueryServiceDescriptor { + return { + fullname: "", + isModule: false, + methods: [] + }; +} + +export const QueryServiceDescriptor = { + encode(message: QueryServiceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fullname !== "") { + writer.uint32(10).string(message.fullname); + } + + if (message.isModule === true) { + writer.uint32(16).bool(message.isModule); + } + + for (const v of message.methods) { + QueryMethodDescriptor.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryServiceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryServiceDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fullname = reader.string(); + break; + + case 2: + message.isModule = reader.bool(); + break; + + case 3: + message.methods.push(QueryMethodDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryServiceDescriptor { + const message = createBaseQueryServiceDescriptor(); + message.fullname = object.fullname ?? ""; + message.isModule = object.isModule ?? false; + message.methods = object.methods?.map(e => QueryMethodDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseQueryMethodDescriptor(): QueryMethodDescriptor { + return { + name: "", + fullQueryPath: "" + }; +} + +export const QueryMethodDescriptor = { + encode(message: QueryMethodDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.fullQueryPath !== "") { + writer.uint32(18).string(message.fullQueryPath); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryMethodDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMethodDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.fullQueryPath = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryMethodDescriptor { + const message = createBaseQueryMethodDescriptor(); + message.name = object.name ?? ""; + message.fullQueryPath = object.fullQueryPath ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/snapshots/v1beta1/snapshot.ts b/packages/codegen/src/cosmos/base/snapshots/v1beta1/snapshot.ts new file mode 100644 index 00000000..0390b680 --- /dev/null +++ b/packages/codegen/src/cosmos/base/snapshots/v1beta1/snapshot.ts @@ -0,0 +1,670 @@ +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** Snapshot contains Tendermint state sync snapshot info. */ + +export interface Snapshot { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata?: Metadata; +} +/** Snapshot contains Tendermint state sync snapshot info. */ + +export interface SnapshotSDKType { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata?: MetadataSDKType; +} +/** Metadata contains SDK-specific snapshot metadata. */ + +export interface Metadata { + /** SHA-256 chunk hashes */ + chunkHashes: Uint8Array[]; +} +/** Metadata contains SDK-specific snapshot metadata. */ + +export interface MetadataSDKType { + chunk_hashes: Uint8Array[]; +} +/** SnapshotItem is an item contained in a rootmulti.Store snapshot. */ + +export interface SnapshotItem { + store?: SnapshotStoreItem; + iavl?: SnapshotIAVLItem; + extension?: SnapshotExtensionMeta; + extensionPayload?: SnapshotExtensionPayload; + kv?: SnapshotKVItem; + schema?: SnapshotSchema; +} +/** SnapshotItem is an item contained in a rootmulti.Store snapshot. */ + +export interface SnapshotItemSDKType { + store?: SnapshotStoreItemSDKType; + iavl?: SnapshotIAVLItemSDKType; + extension?: SnapshotExtensionMetaSDKType; + extension_payload?: SnapshotExtensionPayloadSDKType; + kv?: SnapshotKVItemSDKType; + schema?: SnapshotSchemaSDKType; +} +/** SnapshotStoreItem contains metadata about a snapshotted store. */ + +export interface SnapshotStoreItem { + name: string; +} +/** SnapshotStoreItem contains metadata about a snapshotted store. */ + +export interface SnapshotStoreItemSDKType { + name: string; +} +/** SnapshotIAVLItem is an exported IAVL node. */ + +export interface SnapshotIAVLItem { + key: Uint8Array; + value: Uint8Array; + /** version is block height */ + + version: Long; + /** height is depth of the tree. */ + + height: number; +} +/** SnapshotIAVLItem is an exported IAVL node. */ + +export interface SnapshotIAVLItemSDKType { + key: Uint8Array; + value: Uint8Array; + version: Long; + height: number; +} +/** SnapshotExtensionMeta contains metadata about an external snapshotter. */ + +export interface SnapshotExtensionMeta { + name: string; + format: number; +} +/** SnapshotExtensionMeta contains metadata about an external snapshotter. */ + +export interface SnapshotExtensionMetaSDKType { + name: string; + format: number; +} +/** SnapshotExtensionPayload contains payloads of an external snapshotter. */ + +export interface SnapshotExtensionPayload { + payload: Uint8Array; +} +/** SnapshotExtensionPayload contains payloads of an external snapshotter. 
*/ + +export interface SnapshotExtensionPayloadSDKType { + payload: Uint8Array; +} +/** SnapshotKVItem is an exported Key/Value Pair */ + +export interface SnapshotKVItem { + key: Uint8Array; + value: Uint8Array; +} +/** SnapshotKVItem is an exported Key/Value Pair */ + +export interface SnapshotKVItemSDKType { + key: Uint8Array; + value: Uint8Array; +} +/** SnapshotSchema is an exported schema of smt store */ + +export interface SnapshotSchema { + keys: Uint8Array[]; +} +/** SnapshotSchema is an exported schema of smt store */ + +export interface SnapshotSchemaSDKType { + keys: Uint8Array[]; +} + +function createBaseSnapshot(): Snapshot { + return { + height: Long.UZERO, + format: 0, + chunks: 0, + hash: new Uint8Array(), + metadata: undefined + }; +} + +export const Snapshot = { + encode(message: Snapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).uint64(message.height); + } + + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + + if (message.chunks !== 0) { + writer.uint32(24).uint32(message.chunks); + } + + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + + if (message.metadata !== undefined) { + Metadata.encode(message.metadata, writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Snapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshot(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.uint64() as Long); + break; + + case 2: + message.format = reader.uint32(); + break; + + case 3: + message.chunks = reader.uint32(); + break; + + case 4: + message.hash = reader.bytes(); + break; + + case 5: + message.metadata = Metadata.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Snapshot { + const message = createBaseSnapshot(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.UZERO; + message.format = object.format ?? 0; + message.chunks = object.chunks ?? 0; + message.hash = object.hash ?? new Uint8Array(); + message.metadata = object.metadata !== undefined && object.metadata !== null ? Metadata.fromPartial(object.metadata) : undefined; + return message; + } + +}; + +function createBaseMetadata(): Metadata { + return { + chunkHashes: [] + }; +} + +export const Metadata = { + encode(message: Metadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.chunkHashes) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Metadata { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.chunkHashes.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Metadata { + const message = createBaseMetadata(); + message.chunkHashes = object.chunkHashes?.map(e => e) || []; + return message; + } + +}; + +function createBaseSnapshotItem(): SnapshotItem { + return { + store: undefined, + iavl: undefined, + extension: undefined, + extensionPayload: undefined, + kv: undefined, + schema: undefined + }; +} + +export const SnapshotItem = { + encode(message: SnapshotItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.store !== undefined) { + SnapshotStoreItem.encode(message.store, writer.uint32(10).fork()).ldelim(); + } + + if (message.iavl !== undefined) { + SnapshotIAVLItem.encode(message.iavl, writer.uint32(18).fork()).ldelim(); + } + + if (message.extension !== undefined) { + SnapshotExtensionMeta.encode(message.extension, writer.uint32(26).fork()).ldelim(); + } + + if (message.extensionPayload !== undefined) { + SnapshotExtensionPayload.encode(message.extensionPayload, writer.uint32(34).fork()).ldelim(); + } + + if (message.kv !== undefined) { + SnapshotKVItem.encode(message.kv, writer.uint32(42).fork()).ldelim(); + } + + if (message.schema !== undefined) { + SnapshotSchema.encode(message.schema, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotItem(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.store = SnapshotStoreItem.decode(reader, reader.uint32()); + break; + + case 2: + message.iavl = SnapshotIAVLItem.decode(reader, reader.uint32()); + break; + + case 3: + message.extension = SnapshotExtensionMeta.decode(reader, reader.uint32()); + break; + + case 4: + message.extensionPayload = SnapshotExtensionPayload.decode(reader, reader.uint32()); + break; + + case 5: + message.kv = SnapshotKVItem.decode(reader, reader.uint32()); + break; + + case 6: + message.schema = SnapshotSchema.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotItem { + const message = createBaseSnapshotItem(); + message.store = object.store !== undefined && object.store !== null ? SnapshotStoreItem.fromPartial(object.store) : undefined; + message.iavl = object.iavl !== undefined && object.iavl !== null ? SnapshotIAVLItem.fromPartial(object.iavl) : undefined; + message.extension = object.extension !== undefined && object.extension !== null ? SnapshotExtensionMeta.fromPartial(object.extension) : undefined; + message.extensionPayload = object.extensionPayload !== undefined && object.extensionPayload !== null ? SnapshotExtensionPayload.fromPartial(object.extensionPayload) : undefined; + message.kv = object.kv !== undefined && object.kv !== null ? SnapshotKVItem.fromPartial(object.kv) : undefined; + message.schema = object.schema !== undefined && object.schema !== null ? 
SnapshotSchema.fromPartial(object.schema) : undefined; + return message; + } + +}; + +function createBaseSnapshotStoreItem(): SnapshotStoreItem { + return { + name: "" + }; +} + +export const SnapshotStoreItem = { + encode(message: SnapshotStoreItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotStoreItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotStoreItem(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotStoreItem { + const message = createBaseSnapshotStoreItem(); + message.name = object.name ?? ""; + return message; + } + +}; + +function createBaseSnapshotIAVLItem(): SnapshotIAVLItem { + return { + key: new Uint8Array(), + value: new Uint8Array(), + version: Long.ZERO, + height: 0 + }; +} + +export const SnapshotIAVLItem = { + encode(message: SnapshotIAVLItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + if (!message.version.isZero()) { + writer.uint32(24).int64(message.version); + } + + if (message.height !== 0) { + writer.uint32(32).int32(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotIAVLItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotIAVLItem(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + case 3: + message.version = (reader.int64() as Long); + break; + + case 4: + message.height = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotIAVLItem { + const message = createBaseSnapshotIAVLItem(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.version = object.version !== undefined && object.version !== null ? Long.fromValue(object.version) : Long.ZERO; + message.height = object.height ?? 0; + return message; + } + +}; + +function createBaseSnapshotExtensionMeta(): SnapshotExtensionMeta { + return { + name: "", + format: 0 + }; +} + +export const SnapshotExtensionMeta = { + encode(message: SnapshotExtensionMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotExtensionMeta { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSnapshotExtensionMeta(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.format = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotExtensionMeta { + const message = createBaseSnapshotExtensionMeta(); + message.name = object.name ?? ""; + message.format = object.format ?? 0; + return message; + } + +}; + +function createBaseSnapshotExtensionPayload(): SnapshotExtensionPayload { + return { + payload: new Uint8Array() + }; +} + +export const SnapshotExtensionPayload = { + encode(message: SnapshotExtensionPayload, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.payload.length !== 0) { + writer.uint32(10).bytes(message.payload); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotExtensionPayload { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotExtensionPayload(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.payload = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotExtensionPayload { + const message = createBaseSnapshotExtensionPayload(); + message.payload = object.payload ?? new Uint8Array(); + return message; + } + +}; + +function createBaseSnapshotKVItem(): SnapshotKVItem { + return { + key: new Uint8Array(), + value: new Uint8Array() + }; +} + +export const SnapshotKVItem = { + encode(message: SnapshotKVItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotKVItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshotKVItem(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotKVItem { + const message = createBaseSnapshotKVItem(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + return message; + } + +}; + +function createBaseSnapshotSchema(): SnapshotSchema { + return { + keys: [] + }; +} + +export const SnapshotSchema = { + encode(message: SnapshotSchema, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.keys) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SnapshotSchema { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSnapshotSchema(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.keys.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SnapshotSchema { + const message = createBaseSnapshotSchema(); + message.keys = object.keys?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/store/v1beta1/commit_info.ts b/packages/codegen/src/cosmos/base/store/v1beta1/commit_info.ts new file mode 100644 index 00000000..406ef7a2 --- /dev/null +++ b/packages/codegen/src/cosmos/base/store/v1beta1/commit_info.ts @@ -0,0 +1,221 @@ +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * CommitInfo defines commit information used by the multi-store when committing + * a version/height. + */ + +export interface CommitInfo { + version: Long; + storeInfos: StoreInfo[]; +} +/** + * CommitInfo defines commit information used by the multi-store when committing + * a version/height. + */ + +export interface CommitInfoSDKType { + version: Long; + store_infos: StoreInfoSDKType[]; +} +/** + * StoreInfo defines store-specific commit information. It contains a reference + * between a store name and the commit ID. + */ + +export interface StoreInfo { + name: string; + commitId?: CommitID; +} +/** + * StoreInfo defines store-specific commit information. It contains a reference + * between a store name and the commit ID. + */ + +export interface StoreInfoSDKType { + name: string; + commit_id?: CommitIDSDKType; +} +/** + * CommitID defines the committment information when a specific store is + * committed. + */ + +export interface CommitID { + version: Long; + hash: Uint8Array; +} +/** + * CommitID defines the committment information when a specific store is + * committed. + */ + +export interface CommitIDSDKType { + version: Long; + hash: Uint8Array; +} + +function createBaseCommitInfo(): CommitInfo { + return { + version: Long.ZERO, + storeInfos: [] + }; +} + +export const CommitInfo = { + encode(message: CommitInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.version.isZero()) { + writer.uint32(8).int64(message.version); + } + + for (const v of message.storeInfos) { + StoreInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.version = (reader.int64() as Long); + break; + + case 2: + message.storeInfos.push(StoreInfo.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommitInfo { + const message = createBaseCommitInfo(); + message.version = object.version !== undefined && object.version !== null ? 
Long.fromValue(object.version) : Long.ZERO; + message.storeInfos = object.storeInfos?.map(e => StoreInfo.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseStoreInfo(): StoreInfo { + return { + name: "", + commitId: undefined + }; +} + +export const StoreInfo = { + encode(message: StoreInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.commitId !== undefined) { + CommitID.encode(message.commitId, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StoreInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.commitId = CommitID.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StoreInfo { + const message = createBaseStoreInfo(); + message.name = object.name ?? ""; + message.commitId = object.commitId !== undefined && object.commitId !== null ? CommitID.fromPartial(object.commitId) : undefined; + return message; + } + +}; + +function createBaseCommitID(): CommitID { + return { + version: Long.ZERO, + hash: new Uint8Array() + }; +} + +export const CommitID = { + encode(message: CommitID, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.version.isZero()) { + writer.uint32(8).int64(message.version); + } + + if (message.hash.length !== 0) { + writer.uint32(18).bytes(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitID { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitID(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.version = (reader.int64() as Long); + break; + + case 2: + message.hash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommitID { + const message = createBaseCommitID(); + message.version = object.version !== undefined && object.version !== null ? Long.fromValue(object.version) : Long.ZERO; + message.hash = object.hash ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/store/v1beta1/listening.ts b/packages/codegen/src/cosmos/base/store/v1beta1/listening.ts new file mode 100644 index 00000000..3a4382be --- /dev/null +++ b/packages/codegen/src/cosmos/base/store/v1beta1/listening.ts @@ -0,0 +1,108 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * StoreKVPair is a KVStore KVPair used for listening to state changes (Sets and Deletes) + * It optionally includes the StoreKey for the originating KVStore and a Boolean flag to distinguish between Sets and + * Deletes + * + * Since: cosmos-sdk 0.43 + */ + +export interface StoreKVPair { + /** the store key for the KVStore this pair originates from */ + storeKey: string; + /** true indicates a delete operation, false indicates a set operation */ + + delete: boolean; + key: Uint8Array; + value: Uint8Array; +} +/** + * StoreKVPair is a KVStore KVPair used for listening to state changes (Sets and Deletes) + * It optionally includes the StoreKey for the originating KVStore and a Boolean flag to distinguish between Sets and + * Deletes + * + * Since: cosmos-sdk 0.43 + */ + +export interface StoreKVPairSDKType { + store_key: string; + delete: boolean; + key: Uint8Array; + value: Uint8Array; +} + +function createBaseStoreKVPair(): StoreKVPair { + return { + storeKey: "", + delete: false, + key: new Uint8Array(), + value: new Uint8Array() + }; +} + +export const StoreKVPair = { + encode(message: StoreKVPair, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.storeKey !== "") { + writer.uint32(10).string(message.storeKey); + } + + if (message.delete === true) { + writer.uint32(16).bool(message.delete); + } + + if (message.key.length !== 0) { + writer.uint32(26).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(34).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StoreKVPair { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreKVPair(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.storeKey = reader.string(); + break; + + case 2: + message.delete = reader.bool(); + break; + + case 3: + message.key = reader.bytes(); + break; + + case 4: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StoreKVPair { + const message = createBaseStoreKVPair(); + message.storeKey = object.storeKey ?? ""; + message.delete = object.delete ?? false; + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.lcd.ts new file mode 100644 index 00000000..831fcf2c --- /dev/null +++ b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.lcd.ts @@ -0,0 +1,81 @@ +import { setPaginationParams } from "../../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { GetNodeInfoRequest, GetNodeInfoResponseSDKType, GetSyncingRequest, GetSyncingResponseSDKType, GetLatestBlockRequest, GetLatestBlockResponseSDKType, GetBlockByHeightRequest, GetBlockByHeightResponseSDKType, GetLatestValidatorSetRequest, GetLatestValidatorSetResponseSDKType, GetValidatorSetByHeightRequest, GetValidatorSetByHeightResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.getNodeInfo = this.getNodeInfo.bind(this); + this.getSyncing = this.getSyncing.bind(this); + this.getLatestBlock = this.getLatestBlock.bind(this); + this.getBlockByHeight = this.getBlockByHeight.bind(this); + this.getLatestValidatorSet = this.getLatestValidatorSet.bind(this); + this.getValidatorSetByHeight = this.getValidatorSetByHeight.bind(this); + } + /* GetNodeInfo queries the current node info. */ + + + async getNodeInfo(_params: GetNodeInfoRequest = {}): Promise { + const endpoint = `cosmos/base/tendermint/v1beta1/node_info`; + return await this.req.get(endpoint); + } + /* GetSyncing queries node syncing. */ + + + async getSyncing(_params: GetSyncingRequest = {}): Promise { + const endpoint = `cosmos/base/tendermint/v1beta1/syncing`; + return await this.req.get(endpoint); + } + /* GetLatestBlock returns the latest block. */ + + + async getLatestBlock(_params: GetLatestBlockRequest = {}): Promise { + const endpoint = `cosmos/base/tendermint/v1beta1/blocks/latest`; + return await this.req.get(endpoint); + } + /* GetBlockByHeight queries block for given height. */ + + + async getBlockByHeight(params: GetBlockByHeightRequest): Promise { + const endpoint = `cosmos/base/tendermint/v1beta1/blocks/${params.height}`; + return await this.req.get(endpoint); + } + /* GetLatestValidatorSet queries latest validator-set. */ + + + async getLatestValidatorSet(params: GetLatestValidatorSetRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/base/tendermint/v1beta1/validatorsets/latest`; + return await this.req.get(endpoint, options); + } + /* GetValidatorSetByHeight queries validator-set at a given height. 
*/ + + + async getValidatorSetByHeight(params: GetValidatorSetByHeightRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/base/tendermint/v1beta1/validatorsets/${params.height}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.rpc.Service.ts b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.rpc.Service.ts new file mode 100644 index 00000000..ce8c9187 --- /dev/null +++ b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.rpc.Service.ts @@ -0,0 +1,107 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { GetNodeInfoRequest, GetNodeInfoResponse, GetSyncingRequest, GetSyncingResponse, GetLatestBlockRequest, GetLatestBlockResponse, GetBlockByHeightRequest, GetBlockByHeightResponse, GetLatestValidatorSetRequest, GetLatestValidatorSetResponse, GetValidatorSetByHeightRequest, GetValidatorSetByHeightResponse } from "./query"; +/** Service defines the gRPC querier service for tendermint queries. */ + +export interface Service { + /** GetNodeInfo queries the current node info. */ + getNodeInfo(request?: GetNodeInfoRequest): Promise; + /** GetSyncing queries node syncing. */ + + getSyncing(request?: GetSyncingRequest): Promise; + /** GetLatestBlock returns the latest block. */ + + getLatestBlock(request?: GetLatestBlockRequest): Promise; + /** GetBlockByHeight queries block for given height. */ + + getBlockByHeight(request: GetBlockByHeightRequest): Promise; + /** GetLatestValidatorSet queries latest validator-set. */ + + getLatestValidatorSet(request?: GetLatestValidatorSetRequest): Promise; + /** GetValidatorSetByHeight queries validator-set at a given height. 
*/ + + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise; +} +export class ServiceClientImpl implements Service { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.getNodeInfo = this.getNodeInfo.bind(this); + this.getSyncing = this.getSyncing.bind(this); + this.getLatestBlock = this.getLatestBlock.bind(this); + this.getBlockByHeight = this.getBlockByHeight.bind(this); + this.getLatestValidatorSet = this.getLatestValidatorSet.bind(this); + this.getValidatorSetByHeight = this.getValidatorSetByHeight.bind(this); + } + + getNodeInfo(request: GetNodeInfoRequest = {}): Promise { + const data = GetNodeInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetNodeInfo", data); + return promise.then(data => GetNodeInfoResponse.decode(new _m0.Reader(data))); + } + + getSyncing(request: GetSyncingRequest = {}): Promise { + const data = GetSyncingRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetSyncing", data); + return promise.then(data => GetSyncingResponse.decode(new _m0.Reader(data))); + } + + getLatestBlock(request: GetLatestBlockRequest = {}): Promise { + const data = GetLatestBlockRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetLatestBlock", data); + return promise.then(data => GetLatestBlockResponse.decode(new _m0.Reader(data))); + } + + getBlockByHeight(request: GetBlockByHeightRequest): Promise { + const data = GetBlockByHeightRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetBlockByHeight", data); + return promise.then(data => GetBlockByHeightResponse.decode(new _m0.Reader(data))); + } + + getLatestValidatorSet(request: GetLatestValidatorSetRequest = { + pagination: undefined + }): Promise { + const data = GetLatestValidatorSetRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetLatestValidatorSet", data); + return promise.then(data => GetLatestValidatorSetResponse.decode(new _m0.Reader(data))); + } + + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise { + const data = GetValidatorSetByHeightRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetValidatorSetByHeight", data); + return promise.then(data => GetValidatorSetByHeightResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new ServiceClientImpl(rpc); + return { + getNodeInfo(request?: GetNodeInfoRequest): Promise { + return queryService.getNodeInfo(request); + }, + + getSyncing(request?: GetSyncingRequest): Promise { + return queryService.getSyncing(request); + }, + + getLatestBlock(request?: GetLatestBlockRequest): Promise { + return queryService.getLatestBlock(request); + }, + + getBlockByHeight(request: GetBlockByHeightRequest): Promise { + return queryService.getBlockByHeight(request); + }, + + getLatestValidatorSet(request?: GetLatestValidatorSetRequest): Promise { + return queryService.getLatestValidatorSet(request); + }, + + getValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise { + return queryService.getValidatorSetByHeight(request); + } + + }; +}; \ No newline at end of file diff --git 
a/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.ts b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.ts new file mode 100644 index 00000000..8e2b5d64 --- /dev/null +++ b/packages/codegen/src/cosmos/base/tendermint/v1beta1/query.ts @@ -0,0 +1,1041 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { BlockID, BlockIDSDKType } from "../../../../tendermint/types/types"; +import { Block, BlockSDKType } from "../../../../tendermint/types/block"; +import { NodeInfo, NodeInfoSDKType } from "../../../../tendermint/p2p/types"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetValidatorSetByHeightRequest { + height: Long; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetValidatorSetByHeightRequestSDKType { + height: Long; + pagination?: PageRequestSDKType; +} +/** GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetValidatorSetByHeightResponse { + blockHeight: Long; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetValidatorSetByHeightResponseSDKType { + block_height: Long; + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetLatestValidatorSetRequest { + /** pagination defines an pagination for the request. */ + pagination?: PageRequest; +} +/** GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetLatestValidatorSetRequestSDKType { + pagination?: PageRequestSDKType; +} +/** GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetLatestValidatorSetResponse { + blockHeight: Long; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ + +export interface GetLatestValidatorSetResponseSDKType { + block_height: Long; + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** Validator is the type for the validator-set. */ + +export interface Validator { + address: string; + pubKey?: Any; + votingPower: Long; + proposerPriority: Long; +} +/** Validator is the type for the validator-set. */ + +export interface ValidatorSDKType { + address: string; + pub_key?: AnySDKType; + voting_power: Long; + proposer_priority: Long; +} +/** GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. */ + +export interface GetBlockByHeightRequest { + height: Long; +} +/** GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. 
*/ + +export interface GetBlockByHeightRequestSDKType { + height: Long; +} +/** GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. */ + +export interface GetBlockByHeightResponse { + blockId?: BlockID; + block?: Block; +} +/** GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. */ + +export interface GetBlockByHeightResponseSDKType { + block_id?: BlockIDSDKType; + block?: BlockSDKType; +} +/** GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. */ + +export interface GetLatestBlockRequest {} +/** GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. */ + +export interface GetLatestBlockRequestSDKType {} +/** GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. */ + +export interface GetLatestBlockResponse { + blockId?: BlockID; + block?: Block; +} +/** GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. */ + +export interface GetLatestBlockResponseSDKType { + block_id?: BlockIDSDKType; + block?: BlockSDKType; +} +/** GetSyncingRequest is the request type for the Query/GetSyncing RPC method. */ + +export interface GetSyncingRequest {} +/** GetSyncingRequest is the request type for the Query/GetSyncing RPC method. */ + +export interface GetSyncingRequestSDKType {} +/** GetSyncingResponse is the response type for the Query/GetSyncing RPC method. */ + +export interface GetSyncingResponse { + syncing: boolean; +} +/** GetSyncingResponse is the response type for the Query/GetSyncing RPC method. */ + +export interface GetSyncingResponseSDKType { + syncing: boolean; +} +/** GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. */ + +export interface GetNodeInfoRequest {} +/** GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. */ + +export interface GetNodeInfoRequestSDKType {} +/** GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. */ + +export interface GetNodeInfoResponse { + nodeInfo?: NodeInfo; + applicationVersion?: VersionInfo; +} +/** GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. */ + +export interface GetNodeInfoResponseSDKType { + node_info?: NodeInfoSDKType; + application_version?: VersionInfoSDKType; +} +/** VersionInfo is the type for the GetNodeInfoResponse message. */ + +export interface VersionInfo { + name: string; + appName: string; + version: string; + gitCommit: string; + buildTags: string; + goVersion: string; + buildDeps: Module[]; + /** Since: cosmos-sdk 0.43 */ + + cosmosSdkVersion: string; +} +/** VersionInfo is the type for the GetNodeInfoResponse message. 
*/ + +export interface VersionInfoSDKType { + name: string; + app_name: string; + version: string; + git_commit: string; + build_tags: string; + go_version: string; + build_deps: ModuleSDKType[]; + cosmos_sdk_version: string; +} +/** Module is the type for VersionInfo */ + +export interface Module { + /** module path */ + path: string; + /** module version */ + + version: string; + /** checksum */ + + sum: string; +} +/** Module is the type for VersionInfo */ + +export interface ModuleSDKType { + path: string; + version: string; + sum: string; +} + +function createBaseGetValidatorSetByHeightRequest(): GetValidatorSetByHeightRequest { + return { + height: Long.ZERO, + pagination: undefined + }; +} + +export const GetValidatorSetByHeightRequest = { + encode(message: GetValidatorSetByHeightRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetValidatorSetByHeightRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetValidatorSetByHeightRequest { + const message = createBaseGetValidatorSetByHeightRequest(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseGetValidatorSetByHeightResponse(): GetValidatorSetByHeightResponse { + return { + blockHeight: Long.ZERO, + validators: [], + pagination: undefined + }; +} + +export const GetValidatorSetByHeightResponse = { + encode(message: GetValidatorSetByHeightResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.blockHeight.isZero()) { + writer.uint32(8).int64(message.blockHeight); + } + + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetValidatorSetByHeightResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockHeight = (reader.int64() as Long); + break; + + case 2: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetValidatorSetByHeightResponse { + const message = createBaseGetValidatorSetByHeightResponse(); + message.blockHeight = object.blockHeight !== undefined && object.blockHeight !== null ? Long.fromValue(object.blockHeight) : Long.ZERO; + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseGetLatestValidatorSetRequest(): GetLatestValidatorSetRequest { + return { + pagination: undefined + }; +} + +export const GetLatestValidatorSetRequest = { + encode(message: GetLatestValidatorSetRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetLatestValidatorSetRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetLatestValidatorSetRequest { + const message = createBaseGetLatestValidatorSetRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseGetLatestValidatorSetResponse(): GetLatestValidatorSetResponse { + return { + blockHeight: Long.ZERO, + validators: [], + pagination: undefined + }; +} + +export const GetLatestValidatorSetResponse = { + encode(message: GetLatestValidatorSetResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.blockHeight.isZero()) { + writer.uint32(8).int64(message.blockHeight); + } + + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetLatestValidatorSetResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockHeight = (reader.int64() as Long); + break; + + case 2: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetLatestValidatorSetResponse { + const message = createBaseGetLatestValidatorSetResponse(); + message.blockHeight = object.blockHeight !== undefined && object.blockHeight !== null ? Long.fromValue(object.blockHeight) : Long.ZERO; + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseValidator(): Validator { + return { + address: "", + pubKey: undefined, + votingPower: Long.ZERO, + proposerPriority: Long.ZERO + }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + + if (!message.votingPower.isZero()) { + writer.uint32(24).int64(message.votingPower); + } + + if (!message.proposerPriority.isZero()) { + writer.uint32(32).int64(message.proposerPriority); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.votingPower = (reader.int64() as Long); + break; + + case 4: + message.proposerPriority = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Validator { + const message = createBaseValidator(); + message.address = object.address ?? ""; + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? Any.fromPartial(object.pubKey) : undefined; + message.votingPower = object.votingPower !== undefined && object.votingPower !== null ? Long.fromValue(object.votingPower) : Long.ZERO; + message.proposerPriority = object.proposerPriority !== undefined && object.proposerPriority !== null ? Long.fromValue(object.proposerPriority) : Long.ZERO; + return message; + } + +}; + +function createBaseGetBlockByHeightRequest(): GetBlockByHeightRequest { + return { + height: Long.ZERO + }; +} + +export const GetBlockByHeightRequest = { + encode(message: GetBlockByHeightRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockByHeightRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetBlockByHeightRequest { + const message = createBaseGetBlockByHeightRequest(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + return message; + } + +}; + +function createBaseGetBlockByHeightResponse(): GetBlockByHeightResponse { + return { + blockId: undefined, + block: undefined + }; +} + +export const GetBlockByHeightResponse = { + encode(message: GetBlockByHeightResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockByHeightResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 2: + message.block = Block.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetBlockByHeightResponse { + const message = createBaseGetBlockByHeightResponse(); + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.block = object.block !== undefined && object.block !== null ? Block.fromPartial(object.block) : undefined; + return message; + } + +}; + +function createBaseGetLatestBlockRequest(): GetLatestBlockRequest { + return {}; +} + +export const GetLatestBlockRequest = { + encode(_: GetLatestBlockRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetLatestBlockRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetLatestBlockRequest { + const message = createBaseGetLatestBlockRequest(); + return message; + } + +}; + +function createBaseGetLatestBlockResponse(): GetLatestBlockResponse { + return { + blockId: undefined, + block: undefined + }; +} + +export const GetLatestBlockResponse = { + encode(message: GetLatestBlockResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetLatestBlockResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 2: + message.block = Block.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetLatestBlockResponse { + const message = createBaseGetLatestBlockResponse(); + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.block = object.block !== undefined && object.block !== null ? Block.fromPartial(object.block) : undefined; + return message; + } + +}; + +function createBaseGetSyncingRequest(): GetSyncingRequest { + return {}; +} + +export const GetSyncingRequest = { + encode(_: GetSyncingRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetSyncingRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetSyncingRequest { + const message = createBaseGetSyncingRequest(); + return message; + } + +}; + +function createBaseGetSyncingResponse(): GetSyncingResponse { + return { + syncing: false + }; +} + +export const GetSyncingResponse = { + encode(message: GetSyncingResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.syncing === true) { + writer.uint32(8).bool(message.syncing); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetSyncingResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.syncing = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetSyncingResponse { + const message = createBaseGetSyncingResponse(); + message.syncing = object.syncing ?? false; + return message; + } + +}; + +function createBaseGetNodeInfoRequest(): GetNodeInfoRequest { + return {}; +} + +export const GetNodeInfoRequest = { + encode(_: GetNodeInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetNodeInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): GetNodeInfoRequest { + const message = createBaseGetNodeInfoRequest(); + return message; + } + +}; + +function createBaseGetNodeInfoResponse(): GetNodeInfoResponse { + return { + nodeInfo: undefined, + applicationVersion: undefined + }; +} + +export const GetNodeInfoResponse = { + encode(message: GetNodeInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nodeInfo !== undefined) { + NodeInfo.encode(message.nodeInfo, writer.uint32(10).fork()).ldelim(); + } + + if (message.applicationVersion !== undefined) { + VersionInfo.encode(message.applicationVersion, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetNodeInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nodeInfo = NodeInfo.decode(reader, reader.uint32()); + break; + + case 2: + message.applicationVersion = VersionInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetNodeInfoResponse { + const message = createBaseGetNodeInfoResponse(); + message.nodeInfo = object.nodeInfo !== undefined && object.nodeInfo !== null ? NodeInfo.fromPartial(object.nodeInfo) : undefined; + message.applicationVersion = object.applicationVersion !== undefined && object.applicationVersion !== null ? 
VersionInfo.fromPartial(object.applicationVersion) : undefined; + return message; + } + +}; + +function createBaseVersionInfo(): VersionInfo { + return { + name: "", + appName: "", + version: "", + gitCommit: "", + buildTags: "", + goVersion: "", + buildDeps: [], + cosmosSdkVersion: "" + }; +} + +export const VersionInfo = { + encode(message: VersionInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.appName !== "") { + writer.uint32(18).string(message.appName); + } + + if (message.version !== "") { + writer.uint32(26).string(message.version); + } + + if (message.gitCommit !== "") { + writer.uint32(34).string(message.gitCommit); + } + + if (message.buildTags !== "") { + writer.uint32(42).string(message.buildTags); + } + + if (message.goVersion !== "") { + writer.uint32(50).string(message.goVersion); + } + + for (const v of message.buildDeps) { + Module.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.cosmosSdkVersion !== "") { + writer.uint32(66).string(message.cosmosSdkVersion); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VersionInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVersionInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.appName = reader.string(); + break; + + case 3: + message.version = reader.string(); + break; + + case 4: + message.gitCommit = reader.string(); + break; + + case 5: + message.buildTags = reader.string(); + break; + + case 6: + message.goVersion = reader.string(); + break; + + case 7: + message.buildDeps.push(Module.decode(reader, reader.uint32())); + break; + + case 8: + message.cosmosSdkVersion = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VersionInfo { + const message = createBaseVersionInfo(); + message.name = object.name ?? ""; + message.appName = object.appName ?? ""; + message.version = object.version ?? ""; + message.gitCommit = object.gitCommit ?? ""; + message.buildTags = object.buildTags ?? ""; + message.goVersion = object.goVersion ?? ""; + message.buildDeps = object.buildDeps?.map(e => Module.fromPartial(e)) || []; + message.cosmosSdkVersion = object.cosmosSdkVersion ?? ""; + return message; + } + +}; + +function createBaseModule(): Module { + return { + path: "", + version: "", + sum: "" + }; +} + +export const Module = { + encode(message: Module, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path !== "") { + writer.uint32(10).string(message.path); + } + + if (message.version !== "") { + writer.uint32(18).string(message.version); + } + + if (message.sum !== "") { + writer.uint32(26).string(message.sum); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Module { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseModule(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + + case 2: + message.version = reader.string(); + break; + + case 3: + message.sum = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Module { + const message = createBaseModule(); + message.path = object.path ?? ""; + message.version = object.version ?? ""; + message.sum = object.sum ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/base/v1beta1/coin.ts b/packages/codegen/src/cosmos/base/v1beta1/coin.ts new file mode 100644 index 00000000..53aa3bc8 --- /dev/null +++ b/packages/codegen/src/cosmos/base/v1beta1/coin.ts @@ -0,0 +1,266 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ + +export interface Coin { + denom: string; + amount: string; +} +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ + +export interface CoinSDKType { + denom: string; + amount: string; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ + +export interface DecCoin { + denom: string; + amount: string; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ + +export interface DecCoinSDKType { + denom: string; + amount: string; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ + +export interface IntProto { + int: string; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ + +export interface IntProtoSDKType { + int: string; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ + +export interface DecProto { + dec: string; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ + +export interface DecProtoSDKType { + dec: string; +} + +function createBaseCoin(): Coin { + return { + denom: "", + amount: "" + }; +} + +export const Coin = { + encode(message: Coin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Coin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCoin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.amount = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Coin { + const message = createBaseCoin(); + message.denom = object.denom ?? 
""; + message.amount = object.amount ?? ""; + return message; + } + +}; + +function createBaseDecCoin(): DecCoin { + return { + denom: "", + amount: "" + }; +} + +export const DecCoin = { + encode(message: DecCoin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecCoin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecCoin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.amount = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DecCoin { + const message = createBaseDecCoin(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + return message; + } + +}; + +function createBaseIntProto(): IntProto { + return { + int: "" + }; +} + +export const IntProto = { + encode(message: IntProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.int !== "") { + writer.uint32(10).string(message.int); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IntProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIntProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.int = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): IntProto { + const message = createBaseIntProto(); + message.int = object.int ?? ""; + return message; + } + +}; + +function createBaseDecProto(): DecProto { + return { + dec: "" + }; +} + +export const DecProto = { + encode(message: DecProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.dec !== "") { + writer.uint32(10).string(message.dec); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.dec = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DecProto { + const message = createBaseDecProto(); + message.dec = object.dec ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/bundle.ts b/packages/codegen/src/cosmos/bundle.ts new file mode 100644 index 00000000..7f36ca32 --- /dev/null +++ b/packages/codegen/src/cosmos/bundle.ts @@ -0,0 +1,399 @@ +import * as _3 from "./app/v1alpha1/config"; +import * as _4 from "./app/v1alpha1/module"; +import * as _5 from "./app/v1alpha1/query"; +import * as _6 from "./auth/v1beta1/auth"; +import * as _7 from "./auth/v1beta1/genesis"; +import * as _8 from "./auth/v1beta1/query"; +import * as _9 from "./authz/v1beta1/authz"; +import * as _10 from "./authz/v1beta1/event"; +import * as _11 from "./authz/v1beta1/genesis"; +import * as _12 from "./authz/v1beta1/query"; +import * as _13 from "./authz/v1beta1/tx"; +import * as _14 from "./bank/v1beta1/authz"; +import * as _15 from "./bank/v1beta1/bank"; +import * as _16 from "./bank/v1beta1/genesis"; +import * as _17 from "./bank/v1beta1/query"; +import * as _18 from "./bank/v1beta1/tx"; +import * as _19 from "./base/abci/v1beta1/abci"; +import * as _20 from "./base/kv/v1beta1/kv"; +import * as _21 from "./base/query/v1beta1/pagination"; +import * as _22 from "./base/reflection/v1beta1/reflection"; +import * as _23 from "./base/reflection/v2alpha1/reflection"; +import * as _24 from "./base/snapshots/v1beta1/snapshot"; +import * as _25 from "./base/store/v1beta1/commit_info"; +import * as _26 from "./base/store/v1beta1/listening"; +import * as _27 from "./base/tendermint/v1beta1/query"; +import * as _28 from "./base/v1beta1/coin"; +import * as _29 from "./capability/v1beta1/capability"; +import * as _30 from "./capability/v1beta1/genesis"; +import * as _31 from "./crisis/v1beta1/genesis"; +import * as _32 from "./crisis/v1beta1/tx"; +import * as _33 from "./crypto/ed25519/keys"; +import * as _34 from "./crypto/hd/v1/hd"; +import * as _35 from "./crypto/keyring/v1/record"; +import * as _36 from "./crypto/multisig/keys"; +import * as _37 from "./crypto/secp256k1/keys"; +import * as _38 from "./crypto/secp256r1/keys"; +import * as _39 from "./distribution/v1beta1/distribution"; +import * as _40 from "./distribution/v1beta1/genesis"; +import * as _41 from "./distribution/v1beta1/query"; +import * as _42 from "./distribution/v1beta1/tx"; +import * as _43 from "./evidence/v1beta1/evidence"; +import * as _44 from "./evidence/v1beta1/genesis"; +import * as _45 from "./evidence/v1beta1/query"; +import * as _46 from "./evidence/v1beta1/tx"; +import * as _47 from "./feegrant/v1beta1/feegrant"; +import * as _48 from "./feegrant/v1beta1/genesis"; +import * as _49 from "./feegrant/v1beta1/query"; +import * as _50 from "./feegrant/v1beta1/tx"; +import * as _51 from "./genutil/v1beta1/genesis"; +import * as _52 from "./gov/v1/genesis"; +import * as _53 from "./gov/v1/gov"; +import * as _54 from "./gov/v1/query"; +import * as _55 from "./gov/v1/tx"; +import * as _56 from "./gov/v1beta1/genesis"; +import * as _57 from "./gov/v1beta1/gov"; +import * as _58 from "./gov/v1beta1/query"; +import * as _59 from "./gov/v1beta1/tx"; +import * as _60 from "./group/v1/events"; +import * as _61 from "./group/v1/genesis"; +import * as _62 from "./group/v1/query"; +import * as _63 from "./group/v1/tx"; +import * as _64 from "./group/v1/types"; +import * as _65 from "./mint/v1beta1/genesis"; +import * as _66 from "./mint/v1beta1/mint"; +import * as _67 from "./mint/v1beta1/query"; +import * as _68 from "./msg/v1/msg"; +import * as _69 from "./nft/v1beta1/event"; +import * as _70 from "./nft/v1beta1/genesis"; +import * 
as _71 from "./nft/v1beta1/nft"; +import * as _72 from "./nft/v1beta1/query"; +import * as _73 from "./nft/v1beta1/tx"; +import * as _74 from "./orm/v1/orm"; +import * as _75 from "./orm/v1alpha1/schema"; +import * as _76 from "./params/v1beta1/params"; +import * as _77 from "./params/v1beta1/query"; +import * as _78 from "./slashing/v1beta1/genesis"; +import * as _79 from "./slashing/v1beta1/query"; +import * as _80 from "./slashing/v1beta1/slashing"; +import * as _81 from "./slashing/v1beta1/tx"; +import * as _82 from "./staking/v1beta1/authz"; +import * as _83 from "./staking/v1beta1/genesis"; +import * as _84 from "./staking/v1beta1/query"; +import * as _85 from "./staking/v1beta1/staking"; +import * as _86 from "./staking/v1beta1/tx"; +import * as _87 from "./tx/signing/v1beta1/signing"; +import * as _88 from "./tx/v1beta1/service"; +import * as _89 from "./tx/v1beta1/tx"; +import * as _90 from "./upgrade/v1beta1/query"; +import * as _91 from "./upgrade/v1beta1/tx"; +import * as _92 from "./upgrade/v1beta1/upgrade"; +import * as _93 from "./vesting/v1beta1/tx"; +import * as _94 from "./vesting/v1beta1/vesting"; +import * as _145 from "./auth/v1beta1/query.lcd"; +import * as _146 from "./authz/v1beta1/query.lcd"; +import * as _147 from "./bank/v1beta1/query.lcd"; +import * as _148 from "./base/tendermint/v1beta1/query.lcd"; +import * as _149 from "./distribution/v1beta1/query.lcd"; +import * as _150 from "./evidence/v1beta1/query.lcd"; +import * as _151 from "./feegrant/v1beta1/query.lcd"; +import * as _152 from "./gov/v1/query.lcd"; +import * as _153 from "./gov/v1beta1/query.lcd"; +import * as _154 from "./group/v1/query.lcd"; +import * as _155 from "./mint/v1beta1/query.lcd"; +import * as _156 from "./nft/v1beta1/query.lcd"; +import * as _157 from "./params/v1beta1/query.lcd"; +import * as _158 from "./slashing/v1beta1/query.lcd"; +import * as _159 from "./staking/v1beta1/query.lcd"; +import * as _160 from "./tx/v1beta1/service.lcd"; +import * as _161 from "./upgrade/v1beta1/query.lcd"; +import * as _162 from "./app/v1alpha1/query.rpc.Query"; +import * as _163 from "./auth/v1beta1/query.rpc.Query"; +import * as _164 from "./authz/v1beta1/query.rpc.Query"; +import * as _165 from "./bank/v1beta1/query.rpc.Query"; +import * as _166 from "./base/tendermint/v1beta1/query.rpc.Service"; +import * as _167 from "./distribution/v1beta1/query.rpc.Query"; +import * as _168 from "./evidence/v1beta1/query.rpc.Query"; +import * as _169 from "./feegrant/v1beta1/query.rpc.Query"; +import * as _170 from "./gov/v1/query.rpc.Query"; +import * as _171 from "./gov/v1beta1/query.rpc.Query"; +import * as _172 from "./group/v1/query.rpc.Query"; +import * as _173 from "./mint/v1beta1/query.rpc.Query"; +import * as _174 from "./nft/v1beta1/query.rpc.Query"; +import * as _175 from "./params/v1beta1/query.rpc.Query"; +import * as _176 from "./slashing/v1beta1/query.rpc.Query"; +import * as _177 from "./staking/v1beta1/query.rpc.Query"; +import * as _178 from "./tx/v1beta1/service.rpc.Service"; +import * as _179 from "./upgrade/v1beta1/query.rpc.Query"; +import * as _180 from "./authz/v1beta1/tx.rpc.msg"; +import * as _181 from "./bank/v1beta1/tx.rpc.msg"; +import * as _182 from "./crisis/v1beta1/tx.rpc.msg"; +import * as _183 from "./distribution/v1beta1/tx.rpc.msg"; +import * as _184 from "./evidence/v1beta1/tx.rpc.msg"; +import * as _185 from "./feegrant/v1beta1/tx.rpc.msg"; +import * as _186 from "./gov/v1/tx.rpc.msg"; +import * as _187 from "./gov/v1beta1/tx.rpc.msg"; +import * as _188 from 
"./group/v1/tx.rpc.msg"; +import * as _189 from "./nft/v1beta1/tx.rpc.msg"; +import * as _190 from "./slashing/v1beta1/tx.rpc.msg"; +import * as _191 from "./staking/v1beta1/tx.rpc.msg"; +import * as _192 from "./upgrade/v1beta1/tx.rpc.msg"; +import * as _193 from "./vesting/v1beta1/tx.rpc.msg"; +import * as _210 from "./lcd"; +import * as _211 from "./rpc.query"; +import * as _212 from "./rpc.tx"; +export namespace cosmos { + export namespace app { + export const v1alpha1 = { ..._3, + ..._4, + ..._5, + ..._162 + }; + } + export namespace auth { + export const v1beta1 = { ..._6, + ..._7, + ..._8, + ..._145, + ..._163 + }; + } + export namespace authz { + export const v1beta1 = { ..._9, + ..._10, + ..._11, + ..._12, + ..._13, + ..._146, + ..._164, + ..._180 + }; + } + export namespace bank { + export const v1beta1 = { ..._14, + ..._15, + ..._16, + ..._17, + ..._18, + ..._147, + ..._165, + ..._181 + }; + } + export namespace base { + export namespace abci { + export const v1beta1 = { ..._19 + }; + } + export namespace kv { + export const v1beta1 = { ..._20 + }; + } + export namespace query { + export const v1beta1 = { ..._21 + }; + } + export namespace reflection { + export const v1beta1 = { ..._22 + }; + export const v2alpha1 = { ..._23 + }; + } + export namespace snapshots { + export const v1beta1 = { ..._24 + }; + } + export namespace store { + export const v1beta1 = { ..._25, + ..._26 + }; + } + export namespace tendermint { + export const v1beta1 = { ..._27, + ..._148, + ..._166 + }; + } + export const v1beta1 = { ..._28 + }; + } + export namespace capability { + export const v1beta1 = { ..._29, + ..._30 + }; + } + export namespace crisis { + export const v1beta1 = { ..._31, + ..._32, + ..._182 + }; + } + export namespace crypto { + export const ed25519 = { ..._33 + }; + export namespace hd { + export const v1 = { ..._34 + }; + } + export namespace keyring { + export const v1 = { ..._35 + }; + } + export const multisig = { ..._36 + }; + export const secp256k1 = { ..._37 + }; + export const secp256r1 = { ..._38 + }; + } + export namespace distribution { + export const v1beta1 = { ..._39, + ..._40, + ..._41, + ..._42, + ..._149, + ..._167, + ..._183 + }; + } + export namespace evidence { + export const v1beta1 = { ..._43, + ..._44, + ..._45, + ..._46, + ..._150, + ..._168, + ..._184 + }; + } + export namespace feegrant { + export const v1beta1 = { ..._47, + ..._48, + ..._49, + ..._50, + ..._151, + ..._169, + ..._185 + }; + } + export namespace genutil { + export const v1beta1 = { ..._51 + }; + } + export namespace gov { + export const v1 = { ..._52, + ..._53, + ..._54, + ..._55, + ..._152, + ..._170, + ..._186 + }; + export const v1beta1 = { ..._56, + ..._57, + ..._58, + ..._59, + ..._153, + ..._171, + ..._187 + }; + } + export namespace group { + export const v1 = { ..._60, + ..._61, + ..._62, + ..._63, + ..._64, + ..._154, + ..._172, + ..._188 + }; + } + export namespace mint { + export const v1beta1 = { ..._65, + ..._66, + ..._67, + ..._155, + ..._173 + }; + } + export namespace msg { + export const v1 = { ..._68 + }; + } + export namespace nft { + export const v1beta1 = { ..._69, + ..._70, + ..._71, + ..._72, + ..._73, + ..._156, + ..._174, + ..._189 + }; + } + export namespace orm { + export const v1 = { ..._74 + }; + export const v1alpha1 = { ..._75 + }; + } + export namespace params { + export const v1beta1 = { ..._76, + ..._77, + ..._157, + ..._175 + }; + } + export namespace slashing { + export const v1beta1 = { ..._78, + ..._79, + ..._80, + ..._81, + ..._158, + ..._176, + 
..._190 + }; + } + export namespace staking { + export const v1beta1 = { ..._82, + ..._83, + ..._84, + ..._85, + ..._86, + ..._159, + ..._177, + ..._191 + }; + } + export namespace tx { + export namespace signing { + export const v1beta1 = { ..._87 + }; + } + export const v1beta1 = { ..._88, + ..._89, + ..._160, + ..._178 + }; + } + export namespace upgrade { + export const v1beta1 = { ..._90, + ..._91, + ..._92, + ..._161, + ..._179, + ..._192 + }; + } + export namespace vesting { + export const v1beta1 = { ..._93, + ..._94, + ..._193 + }; + } + export const ClientFactory = { ..._210, + ..._211, + ..._212 + }; +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/capability/v1beta1/capability.ts b/packages/codegen/src/cosmos/capability/v1beta1/capability.ts new file mode 100644 index 00000000..fe85a141 --- /dev/null +++ b/packages/codegen/src/cosmos/capability/v1beta1/capability.ts @@ -0,0 +1,197 @@ +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * Capability defines an implementation of an object capability. The index + * provided to a Capability must be globally unique. + */ + +export interface Capability { + index: Long; +} +/** + * Capability defines an implementation of an object capability. The index + * provided to a Capability must be globally unique. + */ + +export interface CapabilitySDKType { + index: Long; +} +/** + * Owner defines a single capability owner. An owner is defined by the name of + * capability and the module name. + */ + +export interface Owner { + module: string; + name: string; +} +/** + * Owner defines a single capability owner. An owner is defined by the name of + * capability and the module name. + */ + +export interface OwnerSDKType { + module: string; + name: string; +} +/** + * CapabilityOwners defines a set of owners of a single Capability. The set of + * owners must be unique. + */ + +export interface CapabilityOwners { + owners: Owner[]; +} +/** + * CapabilityOwners defines a set of owners of a single Capability. The set of + * owners must be unique. + */ + +export interface CapabilityOwnersSDKType { + owners: OwnerSDKType[]; +} + +function createBaseCapability(): Capability { + return { + index: Long.UZERO + }; +} + +export const Capability = { + encode(message: Capability, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.index.isZero()) { + writer.uint32(8).uint64(message.index); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Capability { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCapability(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Capability { + const message = createBaseCapability(); + message.index = object.index !== undefined && object.index !== null ? 
Long.fromValue(object.index) : Long.UZERO; + return message; + } + +}; + +function createBaseOwner(): Owner { + return { + module: "", + name: "" + }; +} + +export const Owner = { + encode(message: Owner, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.module !== "") { + writer.uint32(10).string(message.module); + } + + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Owner { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOwner(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.module = reader.string(); + break; + + case 2: + message.name = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Owner { + const message = createBaseOwner(); + message.module = object.module ?? ""; + message.name = object.name ?? ""; + return message; + } + +}; + +function createBaseCapabilityOwners(): CapabilityOwners { + return { + owners: [] + }; +} + +export const CapabilityOwners = { + encode(message: CapabilityOwners, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.owners) { + Owner.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CapabilityOwners { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCapabilityOwners(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.owners.push(Owner.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CapabilityOwners { + const message = createBaseCapabilityOwners(); + message.owners = object.owners?.map(e => Owner.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/capability/v1beta1/genesis.ts b/packages/codegen/src/cosmos/capability/v1beta1/genesis.ts new file mode 100644 index 00000000..28fddb1c --- /dev/null +++ b/packages/codegen/src/cosmos/capability/v1beta1/genesis.ts @@ -0,0 +1,146 @@ +import { CapabilityOwners, CapabilityOwnersSDKType } from "./capability"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisOwners defines the capability owners with their corresponding index. */ + +export interface GenesisOwners { + /** index is the index of the capability owner. */ + index: Long; + /** index_owners are the owners at the given index. */ + + indexOwners?: CapabilityOwners; +} +/** GenesisOwners defines the capability owners with their corresponding index. */ + +export interface GenesisOwnersSDKType { + index: Long; + index_owners?: CapabilityOwnersSDKType; +} +/** GenesisState defines the capability module's genesis state. */ + +export interface GenesisState { + /** index is the capability global index. */ + index: Long; + /** + * owners represents a map from index to owners of the capability index + * index key is string to allow amino marshalling. 
+ */ + + owners: GenesisOwners[]; +} +/** GenesisState defines the capability module's genesis state. */ + +export interface GenesisStateSDKType { + index: Long; + owners: GenesisOwnersSDKType[]; +} + +function createBaseGenesisOwners(): GenesisOwners { + return { + index: Long.UZERO, + indexOwners: undefined + }; +} + +export const GenesisOwners = { + encode(message: GenesisOwners, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.index.isZero()) { + writer.uint32(8).uint64(message.index); + } + + if (message.indexOwners !== undefined) { + CapabilityOwners.encode(message.indexOwners, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisOwners { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisOwners(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = (reader.uint64() as Long); + break; + + case 2: + message.indexOwners = CapabilityOwners.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisOwners { + const message = createBaseGenesisOwners(); + message.index = object.index !== undefined && object.index !== null ? Long.fromValue(object.index) : Long.UZERO; + message.indexOwners = object.indexOwners !== undefined && object.indexOwners !== null ? CapabilityOwners.fromPartial(object.indexOwners) : undefined; + return message; + } + +}; + +function createBaseGenesisState(): GenesisState { + return { + index: Long.UZERO, + owners: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.index.isZero()) { + writer.uint32(8).uint64(message.index); + } + + for (const v of message.owners) { + GenesisOwners.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = (reader.uint64() as Long); + break; + + case 2: + message.owners.push(GenesisOwners.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.index = object.index !== undefined && object.index !== null ? Long.fromValue(object.index) : Long.UZERO; + message.owners = object.owners?.map(e => GenesisOwners.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crisis/v1beta1/genesis.ts b/packages/codegen/src/cosmos/crisis/v1beta1/genesis.ts new file mode 100644 index 00000000..4b2cf5df --- /dev/null +++ b/packages/codegen/src/cosmos/crisis/v1beta1/genesis.ts @@ -0,0 +1,62 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the crisis module's genesis state. 
*/ + +export interface GenesisState { + /** + * constant_fee is the fee used to verify the invariant in the crisis + * module. + */ + constantFee?: Coin; +} +/** GenesisState defines the crisis module's genesis state. */ + +export interface GenesisStateSDKType { + constant_fee?: CoinSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + constantFee: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.constantFee !== undefined) { + Coin.encode(message.constantFee, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 3: + message.constantFee = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.constantFee = object.constantFee !== undefined && object.constantFee !== null ? Coin.fromPartial(object.constantFee) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crisis/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/crisis/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..cd2c0387 --- /dev/null +++ b/packages/codegen/src/cosmos/crisis/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgVerifyInvariant, MsgVerifyInvariantResponse } from "./tx"; +/** Msg defines the bank Msg service. */ + +export interface Msg { + /** VerifyInvariant defines a method to verify a particular invariance. */ + verifyInvariant(request: MsgVerifyInvariant): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.verifyInvariant = this.verifyInvariant.bind(this); + } + + verifyInvariant(request: MsgVerifyInvariant): Promise { + const data = MsgVerifyInvariant.encode(request).finish(); + const promise = this.rpc.request("cosmos.crisis.v1beta1.Msg", "VerifyInvariant", data); + return promise.then(data => MsgVerifyInvariantResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crisis/v1beta1/tx.ts b/packages/codegen/src/cosmos/crisis/v1beta1/tx.ts new file mode 100644 index 00000000..14e3dca6 --- /dev/null +++ b/packages/codegen/src/cosmos/crisis/v1beta1/tx.ts @@ -0,0 +1,121 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgVerifyInvariant represents a message to verify a particular invariance. */ + +export interface MsgVerifyInvariant { + sender: string; + invariantModuleName: string; + invariantRoute: string; +} +/** MsgVerifyInvariant represents a message to verify a particular invariance. */ + +export interface MsgVerifyInvariantSDKType { + sender: string; + invariant_module_name: string; + invariant_route: string; +} +/** MsgVerifyInvariantResponse defines the Msg/VerifyInvariant response type. 
*/ + +export interface MsgVerifyInvariantResponse {} +/** MsgVerifyInvariantResponse defines the Msg/VerifyInvariant response type. */ + +export interface MsgVerifyInvariantResponseSDKType {} + +function createBaseMsgVerifyInvariant(): MsgVerifyInvariant { + return { + sender: "", + invariantModuleName: "", + invariantRoute: "" + }; +} + +export const MsgVerifyInvariant = { + encode(message: MsgVerifyInvariant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.invariantModuleName !== "") { + writer.uint32(18).string(message.invariantModuleName); + } + + if (message.invariantRoute !== "") { + writer.uint32(26).string(message.invariantRoute); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVerifyInvariant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVerifyInvariant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.invariantModuleName = reader.string(); + break; + + case 3: + message.invariantRoute = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVerifyInvariant { + const message = createBaseMsgVerifyInvariant(); + message.sender = object.sender ?? ""; + message.invariantModuleName = object.invariantModuleName ?? ""; + message.invariantRoute = object.invariantRoute ?? ""; + return message; + } + +}; + +function createBaseMsgVerifyInvariantResponse(): MsgVerifyInvariantResponse { + return {}; +} + +export const MsgVerifyInvariantResponse = { + encode(_: MsgVerifyInvariantResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVerifyInvariantResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVerifyInvariantResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVerifyInvariantResponse { + const message = createBaseMsgVerifyInvariantResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/ed25519/keys.ts b/packages/codegen/src/cosmos/crypto/ed25519/keys.ts new file mode 100644 index 00000000..cfa59313 --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/ed25519/keys.ts @@ -0,0 +1,130 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * PubKey is an ed25519 public key for handling Tendermint keys in SDK. + * It's needed for Any serialization and SDK compatibility. + * It must not be used in a non Tendermint key context because it doesn't implement + * ADR-28. Nevertheless, you will like to use ed25519 in app user level + * then you must create a new proto message and follow ADR-28 for Address construction. + */ + +export interface PubKey { + key: Uint8Array; +} +/** + * PubKey is an ed25519 public key for handling Tendermint keys in SDK. + * It's needed for Any serialization and SDK compatibility. 
+ * It must not be used in a non Tendermint key context because it doesn't implement + * ADR-28. Nevertheless, you will like to use ed25519 in app user level + * then you must create a new proto message and follow ADR-28 for Address construction. + */ + +export interface PubKeySDKType { + key: Uint8Array; +} +/** + * Deprecated: PrivKey defines a ed25519 private key. + * NOTE: ed25519 keys must not be used in SDK apps except in a tendermint validator context. + */ + +export interface PrivKey { + key: Uint8Array; +} +/** + * Deprecated: PrivKey defines a ed25519 private key. + * NOTE: ed25519 keys must not be used in SDK apps except in a tendermint validator context. + */ + +export interface PrivKeySDKType { + key: Uint8Array; +} + +function createBasePubKey(): PubKey { + return { + key: new Uint8Array() + }; +} + +export const PubKey = { + encode(message: PubKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePubKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PubKey { + const message = createBasePubKey(); + message.key = object.key ?? new Uint8Array(); + return message; + } + +}; + +function createBasePrivKey(): PrivKey { + return { + key: new Uint8Array() + }; +} + +export const PrivKey = { + encode(message: PrivKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePrivKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PrivKey { + const message = createBasePrivKey(); + message.key = object.key ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/hd/v1/hd.ts b/packages/codegen/src/cosmos/crypto/hd/v1/hd.ts new file mode 100644 index 00000000..5fad0324 --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/hd/v1/hd.ts @@ -0,0 +1,117 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** BIP44Params is used as path field in ledger item in Record. */ + +export interface BIP44Params { + /** purpose is a constant set to 44' (or 0x8000002C) following the BIP43 recommendation */ + purpose: number; + /** coin_type is a constant that improves privacy */ + + coinType: number; + /** account splits the key space into independent user identities */ + + account: number; + /** + * change is a constant used for public derivation. Constant 0 is used for external chain and constant 1 for internal + * chain. 
+ */ + + change: boolean; + /** address_index is used as child index in BIP32 derivation */ + + addressIndex: number; +} +/** BIP44Params is used as path field in ledger item in Record. */ + +export interface BIP44ParamsSDKType { + purpose: number; + coin_type: number; + account: number; + change: boolean; + address_index: number; +} + +function createBaseBIP44Params(): BIP44Params { + return { + purpose: 0, + coinType: 0, + account: 0, + change: false, + addressIndex: 0 + }; +} + +export const BIP44Params = { + encode(message: BIP44Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.purpose !== 0) { + writer.uint32(8).uint32(message.purpose); + } + + if (message.coinType !== 0) { + writer.uint32(16).uint32(message.coinType); + } + + if (message.account !== 0) { + writer.uint32(24).uint32(message.account); + } + + if (message.change === true) { + writer.uint32(32).bool(message.change); + } + + if (message.addressIndex !== 0) { + writer.uint32(40).uint32(message.addressIndex); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BIP44Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBIP44Params(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.purpose = reader.uint32(); + break; + + case 2: + message.coinType = reader.uint32(); + break; + + case 3: + message.account = reader.uint32(); + break; + + case 4: + message.change = reader.bool(); + break; + + case 5: + message.addressIndex = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BIP44Params { + const message = createBaseBIP44Params(); + message.purpose = object.purpose ?? 0; + message.coinType = object.coinType ?? 0; + message.account = object.account ?? 0; + message.change = object.change ?? false; + message.addressIndex = object.addressIndex ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/keyring/v1/record.ts b/packages/codegen/src/cosmos/crypto/keyring/v1/record.ts new file mode 100644 index 00000000..773df1d6 --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/keyring/v1/record.ts @@ -0,0 +1,338 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { BIP44Params, BIP44ParamsSDKType } from "../../hd/v1/hd"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** Record is used for representing a key in the keyring. */ + +export interface Record { + /** name represents a name of Record */ + name: string; + /** pub_key represents a public key in any format */ + + pubKey?: Any; + /** local stores the public information about a locally stored key */ + + local?: Record_Local; + /** ledger stores the public information about a Ledger key */ + + ledger?: Record_Ledger; + /** Multi does not store any information. */ + + multi?: Record_Multi; + /** Offline does not store any information. */ + + offline?: Record_Offline; +} +/** Record is used for representing a key in the keyring. */ + +export interface RecordSDKType { + name: string; + pub_key?: AnySDKType; + local?: Record_LocalSDKType; + ledger?: Record_LedgerSDKType; + multi?: Record_MultiSDKType; + offline?: Record_OfflineSDKType; +} +/** + * Item is a keyring item stored in a keyring backend. 
+ * Local item + */ + +export interface Record_Local { + privKey?: Any; + privKeyType: string; +} +/** + * Item is a keyring item stored in a keyring backend. + * Local item + */ + +export interface Record_LocalSDKType { + priv_key?: AnySDKType; + priv_key_type: string; +} +/** Ledger item */ + +export interface Record_Ledger { + path?: BIP44Params; +} +/** Ledger item */ + +export interface Record_LedgerSDKType { + path?: BIP44ParamsSDKType; +} +/** Multi item */ + +export interface Record_Multi {} +/** Multi item */ + +export interface Record_MultiSDKType {} +/** Offline item */ + +export interface Record_Offline {} +/** Offline item */ + +export interface Record_OfflineSDKType {} + +function createBaseRecord(): Record { + return { + name: "", + pubKey: undefined, + local: undefined, + ledger: undefined, + multi: undefined, + offline: undefined + }; +} + +export const Record = { + encode(message: Record, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + + if (message.local !== undefined) { + Record_Local.encode(message.local, writer.uint32(26).fork()).ldelim(); + } + + if (message.ledger !== undefined) { + Record_Ledger.encode(message.ledger, writer.uint32(34).fork()).ldelim(); + } + + if (message.multi !== undefined) { + Record_Multi.encode(message.multi, writer.uint32(42).fork()).ldelim(); + } + + if (message.offline !== undefined) { + Record_Offline.encode(message.offline, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Record { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.local = Record_Local.decode(reader, reader.uint32()); + break; + + case 4: + message.ledger = Record_Ledger.decode(reader, reader.uint32()); + break; + + case 5: + message.multi = Record_Multi.decode(reader, reader.uint32()); + break; + + case 6: + message.offline = Record_Offline.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Record { + const message = createBaseRecord(); + message.name = object.name ?? ""; + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? Any.fromPartial(object.pubKey) : undefined; + message.local = object.local !== undefined && object.local !== null ? Record_Local.fromPartial(object.local) : undefined; + message.ledger = object.ledger !== undefined && object.ledger !== null ? Record_Ledger.fromPartial(object.ledger) : undefined; + message.multi = object.multi !== undefined && object.multi !== null ? Record_Multi.fromPartial(object.multi) : undefined; + message.offline = object.offline !== undefined && object.offline !== null ? 
Record_Offline.fromPartial(object.offline) : undefined; + return message; + } + +}; + +function createBaseRecord_Local(): Record_Local { + return { + privKey: undefined, + privKeyType: "" + }; +} + +export const Record_Local = { + encode(message: Record_Local, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.privKey !== undefined) { + Any.encode(message.privKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.privKeyType !== "") { + writer.uint32(18).string(message.privKeyType); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Local { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRecord_Local(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.privKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.privKeyType = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Record_Local { + const message = createBaseRecord_Local(); + message.privKey = object.privKey !== undefined && object.privKey !== null ? Any.fromPartial(object.privKey) : undefined; + message.privKeyType = object.privKeyType ?? ""; + return message; + } + +}; + +function createBaseRecord_Ledger(): Record_Ledger { + return { + path: undefined + }; +} + +export const Record_Ledger = { + encode(message: Record_Ledger, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path !== undefined) { + BIP44Params.encode(message.path, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Ledger { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRecord_Ledger(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = BIP44Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Record_Ledger { + const message = createBaseRecord_Ledger(); + message.path = object.path !== undefined && object.path !== null ? BIP44Params.fromPartial(object.path) : undefined; + return message; + } + +}; + +function createBaseRecord_Multi(): Record_Multi { + return {}; +} + +export const Record_Multi = { + encode(_: Record_Multi, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Multi { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRecord_Multi(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): Record_Multi { + const message = createBaseRecord_Multi(); + return message; + } + +}; + +function createBaseRecord_Offline(): Record_Offline { + return {}; +} + +export const Record_Offline = { + encode(_: Record_Offline, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Record_Offline { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRecord_Offline(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): Record_Offline { + const message = createBaseRecord_Offline(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/multisig/keys.ts b/packages/codegen/src/cosmos/crypto/multisig/keys.ts new file mode 100644 index 00000000..7a63e069 --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/multisig/keys.ts @@ -0,0 +1,78 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * LegacyAminoPubKey specifies a public key type + * which nests multiple public keys and a threshold, + * it uses legacy amino address rules. + */ + +export interface LegacyAminoPubKey { + threshold: number; + publicKeys: Any[]; +} +/** + * LegacyAminoPubKey specifies a public key type + * which nests multiple public keys and a threshold, + * it uses legacy amino address rules. + */ + +export interface LegacyAminoPubKeySDKType { + threshold: number; + public_keys: AnySDKType[]; +} + +function createBaseLegacyAminoPubKey(): LegacyAminoPubKey { + return { + threshold: 0, + publicKeys: [] + }; +} + +export const LegacyAminoPubKey = { + encode(message: LegacyAminoPubKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.threshold !== 0) { + writer.uint32(8).uint32(message.threshold); + } + + for (const v of message.publicKeys) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LegacyAminoPubKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseLegacyAminoPubKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.threshold = reader.uint32(); + break; + + case 2: + message.publicKeys.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LegacyAminoPubKey { + const message = createBaseLegacyAminoPubKey(); + message.threshold = object.threshold ?? 
0; + message.publicKeys = object.publicKeys?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/multisig/v1beta1/multisig.ts b/packages/codegen/src/cosmos/crypto/multisig/v1beta1/multisig.ts new file mode 100644 index 00000000..3a6e570c --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/multisig/v1beta1/multisig.ts @@ -0,0 +1,142 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. + * See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers + * signed and with which modes. + */ + +export interface MultiSignature { + signatures: Uint8Array[]; +} +/** + * MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. + * See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers + * signed and with which modes. + */ + +export interface MultiSignatureSDKType { + signatures: Uint8Array[]; +} +/** + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. + */ + +export interface CompactBitArray { + extraBitsStored: number; + elems: Uint8Array; +} +/** + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. + */ + +export interface CompactBitArraySDKType { + extra_bits_stored: number; + elems: Uint8Array; +} + +function createBaseMultiSignature(): MultiSignature { + return { + signatures: [] + }; +} + +export const MultiSignature = { + encode(message: MultiSignature, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signatures) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MultiSignature { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMultiSignature(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signatures.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MultiSignature { + const message = createBaseMultiSignature(); + message.signatures = object.signatures?.map(e => e) || []; + return message; + } + +}; + +function createBaseCompactBitArray(): CompactBitArray { + return { + extraBitsStored: 0, + elems: new Uint8Array() + }; +} + +export const CompactBitArray = { + encode(message: CompactBitArray, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.extraBitsStored !== 0) { + writer.uint32(8).uint32(message.extraBitsStored); + } + + if (message.elems.length !== 0) { + writer.uint32(18).bytes(message.elems); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompactBitArray { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCompactBitArray(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.extraBitsStored = reader.uint32(); + break; + + case 2: + message.elems = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CompactBitArray { + const message = createBaseCompactBitArray(); + message.extraBitsStored = object.extraBitsStored ?? 0; + message.elems = object.elems ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/secp256k1/keys.ts b/packages/codegen/src/cosmos/crypto/secp256k1/keys.ts new file mode 100644 index 00000000..576b262f --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/secp256k1/keys.ts @@ -0,0 +1,124 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * PubKey defines a secp256k1 public key + * Key is the compressed form of the pubkey. The first byte depends is a 0x02 byte + * if the y-coordinate is the lexicographically largest of the two associated with + * the x-coordinate. Otherwise the first byte is a 0x03. + * This prefix is followed with the x-coordinate. + */ + +export interface PubKey { + key: Uint8Array; +} +/** + * PubKey defines a secp256k1 public key + * Key is the compressed form of the pubkey. The first byte depends is a 0x02 byte + * if the y-coordinate is the lexicographically largest of the two associated with + * the x-coordinate. Otherwise the first byte is a 0x03. + * This prefix is followed with the x-coordinate. + */ + +export interface PubKeySDKType { + key: Uint8Array; +} +/** PrivKey defines a secp256k1 private key. */ + +export interface PrivKey { + key: Uint8Array; +} +/** PrivKey defines a secp256k1 private key. */ + +export interface PrivKeySDKType { + key: Uint8Array; +} + +function createBasePubKey(): PubKey { + return { + key: new Uint8Array() + }; +} + +export const PubKey = { + encode(message: PubKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePubKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PubKey { + const message = createBasePubKey(); + message.key = object.key ?? new Uint8Array(); + return message; + } + +}; + +function createBasePrivKey(): PrivKey { + return { + key: new Uint8Array() + }; +} + +export const PrivKey = { + encode(message: PrivKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePrivKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PrivKey { + const message = createBasePrivKey(); + message.key = object.key ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/crypto/secp256r1/keys.ts b/packages/codegen/src/cosmos/crypto/secp256r1/keys.ts new file mode 100644 index 00000000..ffb3240e --- /dev/null +++ b/packages/codegen/src/cosmos/crypto/secp256r1/keys.ts @@ -0,0 +1,117 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** PubKey defines a secp256r1 ECDSA public key. */ + +export interface PubKey { + /** + * Point on secp256r1 curve in a compressed representation as specified in section + * 4.3.6 of ANSI X9.62: https://webstore.ansi.org/standards/ascx9/ansix9621998 + */ + key: Uint8Array; +} +/** PubKey defines a secp256r1 ECDSA public key. */ + +export interface PubKeySDKType { + key: Uint8Array; +} +/** PrivKey defines a secp256r1 ECDSA private key. */ + +export interface PrivKey { + /** secret number serialized using big-endian encoding */ + secret: Uint8Array; +} +/** PrivKey defines a secp256r1 ECDSA private key. */ + +export interface PrivKeySDKType { + secret: Uint8Array; +} + +function createBasePubKey(): PubKey { + return { + key: new Uint8Array() + }; +} + +export const PubKey = { + encode(message: PubKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PubKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePubKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PubKey { + const message = createBasePubKey(); + message.key = object.key ?? new Uint8Array(); + return message; + } + +}; + +function createBasePrivKey(): PrivKey { + return { + secret: new Uint8Array() + }; +} + +export const PrivKey = { + encode(message: PrivKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.secret.length !== 0) { + writer.uint32(10).bytes(message.secret); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PrivKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePrivKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.secret = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PrivKey { + const message = createBasePrivKey(); + message.secret = object.secret ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/distribution.ts b/packages/codegen/src/cosmos/distribution/v1beta1/distribution.ts new file mode 100644 index 00000000..8cdec795 --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/distribution.ts @@ -0,0 +1,945 @@ +import { DecCoin, DecCoinSDKType, Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Params defines the set of params for the distribution module. */ + +export interface Params { + communityTax: string; + baseProposerReward: string; + bonusProposerReward: string; + withdrawAddrEnabled: boolean; +} +/** Params defines the set of params for the distribution module. */ + +export interface ParamsSDKType { + community_tax: string; + base_proposer_reward: string; + bonus_proposer_reward: string; + withdraw_addr_enabled: boolean; +} +/** + * ValidatorHistoricalRewards represents historical rewards for a validator. + * Height is implicit within the store key. + * Cumulative reward ratio is the sum from the zeroeth period + * until this period of rewards / tokens, per the spec. + * The reference count indicates the number of objects + * which might need to reference this historical entry at any point. + * ReferenceCount = + * number of outstanding delegations which ended the associated period (and + * might need to read that record) + * + number of slashes which ended the associated period (and might need to + * read that record) + * + one per validator for the zeroeth period, set on initialization + */ + +export interface ValidatorHistoricalRewards { + cumulativeRewardRatio: DecCoin[]; + referenceCount: number; +} +/** + * ValidatorHistoricalRewards represents historical rewards for a validator. + * Height is implicit within the store key. + * Cumulative reward ratio is the sum from the zeroeth period + * until this period of rewards / tokens, per the spec. + * The reference count indicates the number of objects + * which might need to reference this historical entry at any point. + * ReferenceCount = + * number of outstanding delegations which ended the associated period (and + * might need to read that record) + * + number of slashes which ended the associated period (and might need to + * read that record) + * + one per validator for the zeroeth period, set on initialization + */ + +export interface ValidatorHistoricalRewardsSDKType { + cumulative_reward_ratio: DecCoinSDKType[]; + reference_count: number; +} +/** + * ValidatorCurrentRewards represents current rewards and current + * period for a validator kept as a running counter and incremented + * each block as long as the validator's tokens remain constant. + */ + +export interface ValidatorCurrentRewards { + rewards: DecCoin[]; + period: Long; +} +/** + * ValidatorCurrentRewards represents current rewards and current + * period for a validator kept as a running counter and incremented + * each block as long as the validator's tokens remain constant. + */ + +export interface ValidatorCurrentRewardsSDKType { + rewards: DecCoinSDKType[]; + period: Long; +} +/** + * ValidatorAccumulatedCommission represents accumulated commission + * for a validator kept as a running counter, can be withdrawn at any time. 
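+ *
+ * Illustrative sketch (editor's addition, not part of the generated output):
+ * round-tripping this message through the generated codec. The denom and
+ * amount values below are placeholders.
+ *
+ *   const msg = ValidatorAccumulatedCommission.fromPartial({
+ *     commission: [{ denom: "uatom", amount: "1000" }]
+ *   });
+ *   const bytes = ValidatorAccumulatedCommission.encode(msg).finish();
+ *   const decoded = ValidatorAccumulatedCommission.decode(bytes);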
+ */ + +export interface ValidatorAccumulatedCommission { + commission: DecCoin[]; +} +/** + * ValidatorAccumulatedCommission represents accumulated commission + * for a validator kept as a running counter, can be withdrawn at any time. + */ + +export interface ValidatorAccumulatedCommissionSDKType { + commission: DecCoinSDKType[]; +} +/** + * ValidatorOutstandingRewards represents outstanding (un-withdrawn) rewards + * for a validator inexpensive to track, allows simple sanity checks. + */ + +export interface ValidatorOutstandingRewards { + rewards: DecCoin[]; +} +/** + * ValidatorOutstandingRewards represents outstanding (un-withdrawn) rewards + * for a validator inexpensive to track, allows simple sanity checks. + */ + +export interface ValidatorOutstandingRewardsSDKType { + rewards: DecCoinSDKType[]; +} +/** + * ValidatorSlashEvent represents a validator slash event. + * Height is implicit within the store key. + * This is needed to calculate appropriate amount of staking tokens + * for delegations which are withdrawn after a slash has occurred. + */ + +export interface ValidatorSlashEvent { + validatorPeriod: Long; + fraction: string; +} +/** + * ValidatorSlashEvent represents a validator slash event. + * Height is implicit within the store key. + * This is needed to calculate appropriate amount of staking tokens + * for delegations which are withdrawn after a slash has occurred. + */ + +export interface ValidatorSlashEventSDKType { + validator_period: Long; + fraction: string; +} +/** ValidatorSlashEvents is a collection of ValidatorSlashEvent messages. */ + +export interface ValidatorSlashEvents { + validatorSlashEvents: ValidatorSlashEvent[]; +} +/** ValidatorSlashEvents is a collection of ValidatorSlashEvent messages. */ + +export interface ValidatorSlashEventsSDKType { + validator_slash_events: ValidatorSlashEventSDKType[]; +} +/** FeePool is the global fee pool for distribution. */ + +export interface FeePool { + communityPool: DecCoin[]; +} +/** FeePool is the global fee pool for distribution. */ + +export interface FeePoolSDKType { + community_pool: DecCoinSDKType[]; +} +/** + * CommunityPoolSpendProposal details a proposal for use of community funds, + * together with how many coins are proposed to be spent, and to which + * recipient account. + */ + +export interface CommunityPoolSpendProposal { + title: string; + description: string; + recipient: string; + amount: Coin[]; +} +/** + * CommunityPoolSpendProposal details a proposal for use of community funds, + * together with how many coins are proposed to be spent, and to which + * recipient account. + */ + +export interface CommunityPoolSpendProposalSDKType { + title: string; + description: string; + recipient: string; + amount: CoinSDKType[]; +} +/** + * DelegatorStartingInfo represents the starting info for a delegator reward + * period. It tracks the previous validator period, the delegation's amount of + * staking token, and the creation height (to check later on if any slashes have + * occurred). NOTE: Even though validators are slashed to whole staking tokens, + * the delegators within the validator may be left with less than a full token, + * thus sdk.Dec is used. + */ + +export interface DelegatorStartingInfo { + previousPeriod: Long; + stake: string; + height: Long; +} +/** + * DelegatorStartingInfo represents the starting info for a delegator reward + * period. 
It tracks the previous validator period, the delegation's amount of + * staking token, and the creation height (to check later on if any slashes have + * occurred). NOTE: Even though validators are slashed to whole staking tokens, + * the delegators within the validator may be left with less than a full token, + * thus sdk.Dec is used. + */ + +export interface DelegatorStartingInfoSDKType { + previous_period: Long; + stake: string; + height: Long; +} +/** + * DelegationDelegatorReward represents the properties + * of a delegator's delegation reward. + */ + +export interface DelegationDelegatorReward { + validatorAddress: string; + reward: DecCoin[]; +} +/** + * DelegationDelegatorReward represents the properties + * of a delegator's delegation reward. + */ + +export interface DelegationDelegatorRewardSDKType { + validator_address: string; + reward: DecCoinSDKType[]; +} +/** + * CommunityPoolSpendProposalWithDeposit defines a CommunityPoolSpendProposal + * with a deposit + */ + +export interface CommunityPoolSpendProposalWithDeposit { + title: string; + description: string; + recipient: string; + amount: string; + deposit: string; +} +/** + * CommunityPoolSpendProposalWithDeposit defines a CommunityPoolSpendProposal + * with a deposit + */ + +export interface CommunityPoolSpendProposalWithDepositSDKType { + title: string; + description: string; + recipient: string; + amount: string; + deposit: string; +} + +function createBaseParams(): Params { + return { + communityTax: "", + baseProposerReward: "", + bonusProposerReward: "", + withdrawAddrEnabled: false + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.communityTax !== "") { + writer.uint32(10).string(message.communityTax); + } + + if (message.baseProposerReward !== "") { + writer.uint32(18).string(message.baseProposerReward); + } + + if (message.bonusProposerReward !== "") { + writer.uint32(26).string(message.bonusProposerReward); + } + + if (message.withdrawAddrEnabled === true) { + writer.uint32(32).bool(message.withdrawAddrEnabled); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.communityTax = reader.string(); + break; + + case 2: + message.baseProposerReward = reader.string(); + break; + + case 3: + message.bonusProposerReward = reader.string(); + break; + + case 4: + message.withdrawAddrEnabled = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.communityTax = object.communityTax ?? ""; + message.baseProposerReward = object.baseProposerReward ?? ""; + message.bonusProposerReward = object.bonusProposerReward ?? ""; + message.withdrawAddrEnabled = object.withdrawAddrEnabled ?? 
false; + return message; + } + +}; + +function createBaseValidatorHistoricalRewards(): ValidatorHistoricalRewards { + return { + cumulativeRewardRatio: [], + referenceCount: 0 + }; +} + +export const ValidatorHistoricalRewards = { + encode(message: ValidatorHistoricalRewards, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.cumulativeRewardRatio) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.referenceCount !== 0) { + writer.uint32(16).uint32(message.referenceCount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorHistoricalRewards { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorHistoricalRewards(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.cumulativeRewardRatio.push(DecCoin.decode(reader, reader.uint32())); + break; + + case 2: + message.referenceCount = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorHistoricalRewards { + const message = createBaseValidatorHistoricalRewards(); + message.cumulativeRewardRatio = object.cumulativeRewardRatio?.map(e => DecCoin.fromPartial(e)) || []; + message.referenceCount = object.referenceCount ?? 0; + return message; + } + +}; + +function createBaseValidatorCurrentRewards(): ValidatorCurrentRewards { + return { + rewards: [], + period: Long.UZERO + }; +} + +export const ValidatorCurrentRewards = { + encode(message: ValidatorCurrentRewards, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rewards) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (!message.period.isZero()) { + writer.uint32(16).uint64(message.period); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorCurrentRewards { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorCurrentRewards(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rewards.push(DecCoin.decode(reader, reader.uint32())); + break; + + case 2: + message.period = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorCurrentRewards { + const message = createBaseValidatorCurrentRewards(); + message.rewards = object.rewards?.map(e => DecCoin.fromPartial(e)) || []; + message.period = object.period !== undefined && object.period !== null ? Long.fromValue(object.period) : Long.UZERO; + return message; + } + +}; + +function createBaseValidatorAccumulatedCommission(): ValidatorAccumulatedCommission { + return { + commission: [] + }; +} + +export const ValidatorAccumulatedCommission = { + encode(message: ValidatorAccumulatedCommission, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.commission) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorAccumulatedCommission { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValidatorAccumulatedCommission(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.commission.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorAccumulatedCommission { + const message = createBaseValidatorAccumulatedCommission(); + message.commission = object.commission?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseValidatorOutstandingRewards(): ValidatorOutstandingRewards { + return { + rewards: [] + }; +} + +export const ValidatorOutstandingRewards = { + encode(message: ValidatorOutstandingRewards, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rewards) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorOutstandingRewards { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorOutstandingRewards(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rewards.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorOutstandingRewards { + const message = createBaseValidatorOutstandingRewards(); + message.rewards = object.rewards?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseValidatorSlashEvent(): ValidatorSlashEvent { + return { + validatorPeriod: Long.UZERO, + fraction: "" + }; +} + +export const ValidatorSlashEvent = { + encode(message: ValidatorSlashEvent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.validatorPeriod.isZero()) { + writer.uint32(8).uint64(message.validatorPeriod); + } + + if (message.fraction !== "") { + writer.uint32(18).string(message.fraction); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEvent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSlashEvent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorPeriod = (reader.uint64() as Long); + break; + + case 2: + message.fraction = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorSlashEvent { + const message = createBaseValidatorSlashEvent(); + message.validatorPeriod = object.validatorPeriod !== undefined && object.validatorPeriod !== null ? Long.fromValue(object.validatorPeriod) : Long.UZERO; + message.fraction = object.fraction ?? 
""; + return message; + } + +}; + +function createBaseValidatorSlashEvents(): ValidatorSlashEvents { + return { + validatorSlashEvents: [] + }; +} + +export const ValidatorSlashEvents = { + encode(message: ValidatorSlashEvents, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validatorSlashEvents) { + ValidatorSlashEvent.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEvents { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSlashEvents(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorSlashEvents.push(ValidatorSlashEvent.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorSlashEvents { + const message = createBaseValidatorSlashEvents(); + message.validatorSlashEvents = object.validatorSlashEvents?.map(e => ValidatorSlashEvent.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseFeePool(): FeePool { + return { + communityPool: [] + }; +} + +export const FeePool = { + encode(message: FeePool, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.communityPool) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FeePool { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFeePool(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.communityPool.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FeePool { + const message = createBaseFeePool(); + message.communityPool = object.communityPool?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCommunityPoolSpendProposal(): CommunityPoolSpendProposal { + return { + title: "", + description: "", + recipient: "", + amount: [] + }; +} + +export const CommunityPoolSpendProposal = { + encode(message: CommunityPoolSpendProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.recipient !== "") { + writer.uint32(26).string(message.recipient); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommunityPoolSpendProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCommunityPoolSpendProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.recipient = reader.string(); + break; + + case 4: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommunityPoolSpendProposal { + const message = createBaseCommunityPoolSpendProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.recipient = object.recipient ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseDelegatorStartingInfo(): DelegatorStartingInfo { + return { + previousPeriod: Long.UZERO, + stake: "", + height: Long.UZERO + }; +} + +export const DelegatorStartingInfo = { + encode(message: DelegatorStartingInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.previousPeriod.isZero()) { + writer.uint32(8).uint64(message.previousPeriod); + } + + if (message.stake !== "") { + writer.uint32(18).string(message.stake); + } + + if (!message.height.isZero()) { + writer.uint32(24).uint64(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorStartingInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelegatorStartingInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.previousPeriod = (reader.uint64() as Long); + break; + + case 2: + message.stake = reader.string(); + break; + + case 3: + message.height = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelegatorStartingInfo { + const message = createBaseDelegatorStartingInfo(); + message.previousPeriod = object.previousPeriod !== undefined && object.previousPeriod !== null ? Long.fromValue(object.previousPeriod) : Long.UZERO; + message.stake = object.stake ?? ""; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.UZERO; + return message; + } + +}; + +function createBaseDelegationDelegatorReward(): DelegationDelegatorReward { + return { + validatorAddress: "", + reward: [] + }; +} + +export const DelegationDelegatorReward = { + encode(message: DelegationDelegatorReward, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + for (const v of message.reward) { + DecCoin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelegationDelegatorReward { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDelegationDelegatorReward(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.reward.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelegationDelegatorReward { + const message = createBaseDelegationDelegatorReward(); + message.validatorAddress = object.validatorAddress ?? ""; + message.reward = object.reward?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCommunityPoolSpendProposalWithDeposit(): CommunityPoolSpendProposalWithDeposit { + return { + title: "", + description: "", + recipient: "", + amount: "", + deposit: "" + }; +} + +export const CommunityPoolSpendProposalWithDeposit = { + encode(message: CommunityPoolSpendProposalWithDeposit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.recipient !== "") { + writer.uint32(26).string(message.recipient); + } + + if (message.amount !== "") { + writer.uint32(34).string(message.amount); + } + + if (message.deposit !== "") { + writer.uint32(42).string(message.deposit); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommunityPoolSpendProposalWithDeposit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommunityPoolSpendProposalWithDeposit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.recipient = reader.string(); + break; + + case 4: + message.amount = reader.string(); + break; + + case 5: + message.deposit = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommunityPoolSpendProposalWithDeposit { + const message = createBaseCommunityPoolSpendProposalWithDeposit(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.recipient = object.recipient ?? ""; + message.amount = object.amount ?? ""; + message.deposit = object.deposit ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/genesis.ts b/packages/codegen/src/cosmos/distribution/v1beta1/genesis.ts new file mode 100644 index 00000000..2e911ec1 --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/genesis.ts @@ -0,0 +1,752 @@ +import { DecCoin, DecCoinSDKType } from "../../base/v1beta1/coin"; +import { ValidatorAccumulatedCommission, ValidatorAccumulatedCommissionSDKType, ValidatorHistoricalRewards, ValidatorHistoricalRewardsSDKType, ValidatorCurrentRewards, ValidatorCurrentRewardsSDKType, DelegatorStartingInfo, DelegatorStartingInfoSDKType, ValidatorSlashEvent, ValidatorSlashEventSDKType, Params, ParamsSDKType, FeePool, FeePoolSDKType } from "./distribution"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * DelegatorWithdrawInfo is the address for where distributions rewards are + * withdrawn to by default this struct is only used at genesis to feed in + * default withdraw addresses. + */ + +export interface DelegatorWithdrawInfo { + /** delegator_address is the address of the delegator. */ + delegatorAddress: string; + /** withdraw_address is the address to withdraw the delegation rewards to. */ + + withdrawAddress: string; +} +/** + * DelegatorWithdrawInfo is the address for where distributions rewards are + * withdrawn to by default this struct is only used at genesis to feed in + * default withdraw addresses. + */ + +export interface DelegatorWithdrawInfoSDKType { + delegator_address: string; + withdraw_address: string; +} +/** ValidatorOutstandingRewardsRecord is used for import/export via genesis json. */ + +export interface ValidatorOutstandingRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** outstanding_rewards represents the oustanding rewards of a validator. */ + + outstandingRewards: DecCoin[]; +} +/** ValidatorOutstandingRewardsRecord is used for import/export via genesis json. */ + +export interface ValidatorOutstandingRewardsRecordSDKType { + validator_address: string; + outstanding_rewards: DecCoinSDKType[]; +} +/** + * ValidatorAccumulatedCommissionRecord is used for import / export via genesis + * json. + */ + +export interface ValidatorAccumulatedCommissionRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** accumulated is the accumulated commission of a validator. */ + + accumulated?: ValidatorAccumulatedCommission; +} +/** + * ValidatorAccumulatedCommissionRecord is used for import / export via genesis + * json. + */ + +export interface ValidatorAccumulatedCommissionRecordSDKType { + validator_address: string; + accumulated?: ValidatorAccumulatedCommissionSDKType; +} +/** + * ValidatorHistoricalRewardsRecord is used for import / export via genesis + * json. + */ + +export interface ValidatorHistoricalRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** period defines the period the historical rewards apply to. */ + + period: Long; + /** rewards defines the historical rewards of a validator. */ + + rewards?: ValidatorHistoricalRewards; +} +/** + * ValidatorHistoricalRewardsRecord is used for import / export via genesis + * json. 
+ */ + +export interface ValidatorHistoricalRewardsRecordSDKType { + validator_address: string; + period: Long; + rewards?: ValidatorHistoricalRewardsSDKType; +} +/** ValidatorCurrentRewardsRecord is used for import / export via genesis json. */ + +export interface ValidatorCurrentRewardsRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** rewards defines the current rewards of a validator. */ + + rewards?: ValidatorCurrentRewards; +} +/** ValidatorCurrentRewardsRecord is used for import / export via genesis json. */ + +export interface ValidatorCurrentRewardsRecordSDKType { + validator_address: string; + rewards?: ValidatorCurrentRewardsSDKType; +} +/** DelegatorStartingInfoRecord used for import / export via genesis json. */ + +export interface DelegatorStartingInfoRecord { + /** delegator_address is the address of the delegator. */ + delegatorAddress: string; + /** validator_address is the address of the validator. */ + + validatorAddress: string; + /** starting_info defines the starting info of a delegator. */ + + startingInfo?: DelegatorStartingInfo; +} +/** DelegatorStartingInfoRecord used for import / export via genesis json. */ + +export interface DelegatorStartingInfoRecordSDKType { + delegator_address: string; + validator_address: string; + starting_info?: DelegatorStartingInfoSDKType; +} +/** ValidatorSlashEventRecord is used for import / export via genesis json. */ + +export interface ValidatorSlashEventRecord { + /** validator_address is the address of the validator. */ + validatorAddress: string; + /** height defines the block height at which the slash event occured. */ + + height: Long; + /** period is the period of the slash event. */ + + period: Long; + /** validator_slash_event describes the slash event. */ + + validatorSlashEvent?: ValidatorSlashEvent; +} +/** ValidatorSlashEventRecord is used for import / export via genesis json. */ + +export interface ValidatorSlashEventRecordSDKType { + validator_address: string; + height: Long; + period: Long; + validator_slash_event?: ValidatorSlashEventSDKType; +} +/** GenesisState defines the distribution module's genesis state. */ + +export interface GenesisState { + /** params defines all the paramaters of the module. */ + params?: Params; + /** fee_pool defines the fee pool at genesis. */ + + feePool?: FeePool; + /** fee_pool defines the delegator withdraw infos at genesis. */ + + delegatorWithdrawInfos: DelegatorWithdrawInfo[]; + /** fee_pool defines the previous proposer at genesis. */ + + previousProposer: string; + /** fee_pool defines the outstanding rewards of all validators at genesis. */ + + outstandingRewards: ValidatorOutstandingRewardsRecord[]; + /** fee_pool defines the accumulated commisions of all validators at genesis. */ + + validatorAccumulatedCommissions: ValidatorAccumulatedCommissionRecord[]; + /** fee_pool defines the historical rewards of all validators at genesis. */ + + validatorHistoricalRewards: ValidatorHistoricalRewardsRecord[]; + /** fee_pool defines the current rewards of all validators at genesis. */ + + validatorCurrentRewards: ValidatorCurrentRewardsRecord[]; + /** fee_pool defines the delegator starting infos at genesis. */ + + delegatorStartingInfos: DelegatorStartingInfoRecord[]; + /** fee_pool defines the validator slash events at genesis. */ + + validatorSlashEvents: ValidatorSlashEventRecord[]; +} +/** GenesisState defines the distribution module's genesis state. 
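+ *
+ * Illustrative sketch (editor's addition, not part of the generated output):
+ * decoding a protobuf-encoded module genesis blob, assuming `bytes` is a
+ * Uint8Array holding the encoded state.
+ *
+ *   const state = GenesisState.decode(bytes);
+ *   console.log(state.previousProposer, state.delegatorWithdrawInfos.length);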
*/ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + fee_pool?: FeePoolSDKType; + delegator_withdraw_infos: DelegatorWithdrawInfoSDKType[]; + previous_proposer: string; + outstanding_rewards: ValidatorOutstandingRewardsRecordSDKType[]; + validator_accumulated_commissions: ValidatorAccumulatedCommissionRecordSDKType[]; + validator_historical_rewards: ValidatorHistoricalRewardsRecordSDKType[]; + validator_current_rewards: ValidatorCurrentRewardsRecordSDKType[]; + delegator_starting_infos: DelegatorStartingInfoRecordSDKType[]; + validator_slash_events: ValidatorSlashEventRecordSDKType[]; +} + +function createBaseDelegatorWithdrawInfo(): DelegatorWithdrawInfo { + return { + delegatorAddress: "", + withdrawAddress: "" + }; +} + +export const DelegatorWithdrawInfo = { + encode(message: DelegatorWithdrawInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.withdrawAddress !== "") { + writer.uint32(18).string(message.withdrawAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorWithdrawInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelegatorWithdrawInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.withdrawAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelegatorWithdrawInfo { + const message = createBaseDelegatorWithdrawInfo(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.withdrawAddress = object.withdrawAddress ?? ""; + return message; + } + +}; + +function createBaseValidatorOutstandingRewardsRecord(): ValidatorOutstandingRewardsRecord { + return { + validatorAddress: "", + outstandingRewards: [] + }; +} + +export const ValidatorOutstandingRewardsRecord = { + encode(message: ValidatorOutstandingRewardsRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + for (const v of message.outstandingRewards) { + DecCoin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorOutstandingRewardsRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorOutstandingRewardsRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.outstandingRewards.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorOutstandingRewardsRecord { + const message = createBaseValidatorOutstandingRewardsRecord(); + message.validatorAddress = object.validatorAddress ?? 
""; + message.outstandingRewards = object.outstandingRewards?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseValidatorAccumulatedCommissionRecord(): ValidatorAccumulatedCommissionRecord { + return { + validatorAddress: "", + accumulated: undefined + }; +} + +export const ValidatorAccumulatedCommissionRecord = { + encode(message: ValidatorAccumulatedCommissionRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + if (message.accumulated !== undefined) { + ValidatorAccumulatedCommission.encode(message.accumulated, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorAccumulatedCommissionRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorAccumulatedCommissionRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.accumulated = ValidatorAccumulatedCommission.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorAccumulatedCommissionRecord { + const message = createBaseValidatorAccumulatedCommissionRecord(); + message.validatorAddress = object.validatorAddress ?? ""; + message.accumulated = object.accumulated !== undefined && object.accumulated !== null ? ValidatorAccumulatedCommission.fromPartial(object.accumulated) : undefined; + return message; + } + +}; + +function createBaseValidatorHistoricalRewardsRecord(): ValidatorHistoricalRewardsRecord { + return { + validatorAddress: "", + period: Long.UZERO, + rewards: undefined + }; +} + +export const ValidatorHistoricalRewardsRecord = { + encode(message: ValidatorHistoricalRewardsRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + if (!message.period.isZero()) { + writer.uint32(16).uint64(message.period); + } + + if (message.rewards !== undefined) { + ValidatorHistoricalRewards.encode(message.rewards, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorHistoricalRewardsRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorHistoricalRewardsRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.period = (reader.uint64() as Long); + break; + + case 3: + message.rewards = ValidatorHistoricalRewards.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorHistoricalRewardsRecord { + const message = createBaseValidatorHistoricalRewardsRecord(); + message.validatorAddress = object.validatorAddress ?? ""; + message.period = object.period !== undefined && object.period !== null ? 
Long.fromValue(object.period) : Long.UZERO; + message.rewards = object.rewards !== undefined && object.rewards !== null ? ValidatorHistoricalRewards.fromPartial(object.rewards) : undefined; + return message; + } + +}; + +function createBaseValidatorCurrentRewardsRecord(): ValidatorCurrentRewardsRecord { + return { + validatorAddress: "", + rewards: undefined + }; +} + +export const ValidatorCurrentRewardsRecord = { + encode(message: ValidatorCurrentRewardsRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + if (message.rewards !== undefined) { + ValidatorCurrentRewards.encode(message.rewards, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorCurrentRewardsRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorCurrentRewardsRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.rewards = ValidatorCurrentRewards.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorCurrentRewardsRecord { + const message = createBaseValidatorCurrentRewardsRecord(); + message.validatorAddress = object.validatorAddress ?? ""; + message.rewards = object.rewards !== undefined && object.rewards !== null ? ValidatorCurrentRewards.fromPartial(object.rewards) : undefined; + return message; + } + +}; + +function createBaseDelegatorStartingInfoRecord(): DelegatorStartingInfoRecord { + return { + delegatorAddress: "", + validatorAddress: "", + startingInfo: undefined + }; +} + +export const DelegatorStartingInfoRecord = { + encode(message: DelegatorStartingInfoRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + if (message.startingInfo !== undefined) { + DelegatorStartingInfo.encode(message.startingInfo, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelegatorStartingInfoRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelegatorStartingInfoRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.startingInfo = DelegatorStartingInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelegatorStartingInfoRecord { + const message = createBaseDelegatorStartingInfoRecord(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + message.startingInfo = object.startingInfo !== undefined && object.startingInfo !== null ? 
DelegatorStartingInfo.fromPartial(object.startingInfo) : undefined; + return message; + } + +}; + +function createBaseValidatorSlashEventRecord(): ValidatorSlashEventRecord { + return { + validatorAddress: "", + height: Long.UZERO, + period: Long.UZERO, + validatorSlashEvent: undefined + }; +} + +export const ValidatorSlashEventRecord = { + encode(message: ValidatorSlashEventRecord, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + if (!message.height.isZero()) { + writer.uint32(16).uint64(message.height); + } + + if (!message.period.isZero()) { + writer.uint32(24).uint64(message.period); + } + + if (message.validatorSlashEvent !== undefined) { + ValidatorSlashEvent.encode(message.validatorSlashEvent, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSlashEventRecord { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSlashEventRecord(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.height = (reader.uint64() as Long); + break; + + case 3: + message.period = (reader.uint64() as Long); + break; + + case 4: + message.validatorSlashEvent = ValidatorSlashEvent.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorSlashEventRecord { + const message = createBaseValidatorSlashEventRecord(); + message.validatorAddress = object.validatorAddress ?? ""; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.UZERO; + message.period = object.period !== undefined && object.period !== null ? Long.fromValue(object.period) : Long.UZERO; + message.validatorSlashEvent = object.validatorSlashEvent !== undefined && object.validatorSlashEvent !== null ? 
ValidatorSlashEvent.fromPartial(object.validatorSlashEvent) : undefined; + return message; + } + +}; + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + feePool: undefined, + delegatorWithdrawInfos: [], + previousProposer: "", + outstandingRewards: [], + validatorAccumulatedCommissions: [], + validatorHistoricalRewards: [], + validatorCurrentRewards: [], + delegatorStartingInfos: [], + validatorSlashEvents: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + if (message.feePool !== undefined) { + FeePool.encode(message.feePool, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.delegatorWithdrawInfos) { + DelegatorWithdrawInfo.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.previousProposer !== "") { + writer.uint32(34).string(message.previousProposer); + } + + for (const v of message.outstandingRewards) { + ValidatorOutstandingRewardsRecord.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + for (const v of message.validatorAccumulatedCommissions) { + ValidatorAccumulatedCommissionRecord.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.validatorHistoricalRewards) { + ValidatorHistoricalRewardsRecord.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + for (const v of message.validatorCurrentRewards) { + ValidatorCurrentRewardsRecord.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + for (const v of message.delegatorStartingInfos) { + DelegatorStartingInfoRecord.encode(v!, writer.uint32(74).fork()).ldelim(); + } + + for (const v of message.validatorSlashEvents) { + ValidatorSlashEventRecord.encode(v!, writer.uint32(82).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.feePool = FeePool.decode(reader, reader.uint32()); + break; + + case 3: + message.delegatorWithdrawInfos.push(DelegatorWithdrawInfo.decode(reader, reader.uint32())); + break; + + case 4: + message.previousProposer = reader.string(); + break; + + case 5: + message.outstandingRewards.push(ValidatorOutstandingRewardsRecord.decode(reader, reader.uint32())); + break; + + case 6: + message.validatorAccumulatedCommissions.push(ValidatorAccumulatedCommissionRecord.decode(reader, reader.uint32())); + break; + + case 7: + message.validatorHistoricalRewards.push(ValidatorHistoricalRewardsRecord.decode(reader, reader.uint32())); + break; + + case 8: + message.validatorCurrentRewards.push(ValidatorCurrentRewardsRecord.decode(reader, reader.uint32())); + break; + + case 9: + message.delegatorStartingInfos.push(DelegatorStartingInfoRecord.decode(reader, reader.uint32())); + break; + + case 10: + message.validatorSlashEvents.push(ValidatorSlashEventRecord.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.feePool = object.feePool !== undefined && object.feePool !== null ? FeePool.fromPartial(object.feePool) : undefined; + message.delegatorWithdrawInfos = object.delegatorWithdrawInfos?.map(e => DelegatorWithdrawInfo.fromPartial(e)) || []; + message.previousProposer = object.previousProposer ?? 
""; + message.outstandingRewards = object.outstandingRewards?.map(e => ValidatorOutstandingRewardsRecord.fromPartial(e)) || []; + message.validatorAccumulatedCommissions = object.validatorAccumulatedCommissions?.map(e => ValidatorAccumulatedCommissionRecord.fromPartial(e)) || []; + message.validatorHistoricalRewards = object.validatorHistoricalRewards?.map(e => ValidatorHistoricalRewardsRecord.fromPartial(e)) || []; + message.validatorCurrentRewards = object.validatorCurrentRewards?.map(e => ValidatorCurrentRewardsRecord.fromPartial(e)) || []; + message.delegatorStartingInfos = object.delegatorStartingInfos?.map(e => DelegatorStartingInfoRecord.fromPartial(e)) || []; + message.validatorSlashEvents = object.validatorSlashEvents?.map(e => ValidatorSlashEventRecord.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/distribution/v1beta1/query.lcd.ts new file mode 100644 index 00000000..2ad66901 --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/query.lcd.ts @@ -0,0 +1,104 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QueryValidatorOutstandingRewardsRequest, QueryValidatorOutstandingRewardsResponseSDKType, QueryValidatorCommissionRequest, QueryValidatorCommissionResponseSDKType, QueryValidatorSlashesRequest, QueryValidatorSlashesResponseSDKType, QueryDelegationRewardsRequest, QueryDelegationRewardsResponseSDKType, QueryDelegationTotalRewardsRequest, QueryDelegationTotalRewardsResponseSDKType, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponseSDKType, QueryDelegatorWithdrawAddressRequest, QueryDelegatorWithdrawAddressResponseSDKType, QueryCommunityPoolRequest, QueryCommunityPoolResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.validatorOutstandingRewards = this.validatorOutstandingRewards.bind(this); + this.validatorCommission = this.validatorCommission.bind(this); + this.validatorSlashes = this.validatorSlashes.bind(this); + this.delegationRewards = this.delegationRewards.bind(this); + this.delegationTotalRewards = this.delegationTotalRewards.bind(this); + this.delegatorValidators = this.delegatorValidators.bind(this); + this.delegatorWithdrawAddress = this.delegatorWithdrawAddress.bind(this); + this.communityPool = this.communityPool.bind(this); + } + /* Params queries params of the distribution module. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/distribution/v1beta1/params`; + return await this.req.get(endpoint); + } + /* ValidatorOutstandingRewards queries rewards of a validator address. */ + + + async validatorOutstandingRewards(params: QueryValidatorOutstandingRewardsRequest): Promise { + const endpoint = `cosmos/distribution/v1beta1/validators/${params.validatorAddress}/outstanding_rewards`; + return await this.req.get(endpoint); + } + /* ValidatorCommission queries accumulated commission for a validator. 
*/ + + + async validatorCommission(params: QueryValidatorCommissionRequest): Promise<QueryValidatorCommissionResponseSDKType> { + const endpoint = `cosmos/distribution/v1beta1/validators/${params.validatorAddress}/commission`; + return await this.req.get<QueryValidatorCommissionResponseSDKType>(endpoint); + } + /* ValidatorSlashes queries slash events of a validator. */ + + + async validatorSlashes(params: QueryValidatorSlashesRequest): Promise<QueryValidatorSlashesResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.startingHeight !== "undefined") { + options.params.starting_height = params.startingHeight; + } + + if (typeof params?.endingHeight !== "undefined") { + options.params.ending_height = params.endingHeight; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/distribution/v1beta1/validators/${params.validatorAddress}/slashes`; + return await this.req.get<QueryValidatorSlashesResponseSDKType>(endpoint, options); + } + /* DelegationRewards queries the total rewards accrued by a delegation. */ + + + async delegationRewards(params: QueryDelegationRewardsRequest): Promise<QueryDelegationRewardsResponseSDKType> { + const endpoint = `cosmos/distribution/v1beta1/delegators/${params.delegatorAddress}/rewards/${params.validatorAddress}`; + return await this.req.get<QueryDelegationRewardsResponseSDKType>(endpoint); + } + /* DelegationTotalRewards queries the total rewards accrued by each + validator. */ + + + async delegationTotalRewards(params: QueryDelegationTotalRewardsRequest): Promise<QueryDelegationTotalRewardsResponseSDKType> { + const endpoint = `cosmos/distribution/v1beta1/delegators/${params.delegatorAddress}/rewards`; + return await this.req.get<QueryDelegationTotalRewardsResponseSDKType>(endpoint); + } + /* DelegatorValidators queries the validators of a delegator. */ + + + async delegatorValidators(params: QueryDelegatorValidatorsRequest): Promise<QueryDelegatorValidatorsResponseSDKType> { + const endpoint = `cosmos/distribution/v1beta1/delegators/${params.delegatorAddress}/validators`; + return await this.req.get<QueryDelegatorValidatorsResponseSDKType>(endpoint); + } + /* DelegatorWithdrawAddress queries withdraw address of a delegator. */ + + + async delegatorWithdrawAddress(params: QueryDelegatorWithdrawAddressRequest): Promise<QueryDelegatorWithdrawAddressResponseSDKType> { + const endpoint = `cosmos/distribution/v1beta1/delegators/${params.delegatorAddress}/withdraw_address`; + return await this.req.get<QueryDelegatorWithdrawAddressResponseSDKType>(endpoint); + } + /* CommunityPool queries the community pool coins. 
*/ + + + async communityPool(_params: QueryCommunityPoolRequest = {}): Promise { + const endpoint = `cosmos/distribution/v1beta1/community_pool`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/distribution/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..539882a1 --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/query.rpc.Query.ts @@ -0,0 +1,150 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QueryValidatorOutstandingRewardsRequest, QueryValidatorOutstandingRewardsResponse, QueryValidatorCommissionRequest, QueryValidatorCommissionResponse, QueryValidatorSlashesRequest, QueryValidatorSlashesResponse, QueryDelegationRewardsRequest, QueryDelegationRewardsResponse, QueryDelegationTotalRewardsRequest, QueryDelegationTotalRewardsResponse, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponse, QueryDelegatorWithdrawAddressRequest, QueryDelegatorWithdrawAddressResponse, QueryCommunityPoolRequest, QueryCommunityPoolResponse } from "./query"; +/** Query defines the gRPC querier service for distribution module. */ + +export interface Query { + /** Params queries params of the distribution module. */ + params(request?: QueryParamsRequest): Promise; + /** ValidatorOutstandingRewards queries rewards of a validator address. */ + + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise; + /** ValidatorCommission queries accumulated commission for a validator. */ + + validatorCommission(request: QueryValidatorCommissionRequest): Promise; + /** ValidatorSlashes queries slash events of a validator. */ + + validatorSlashes(request: QueryValidatorSlashesRequest): Promise; + /** DelegationRewards queries the total rewards accrued by a delegation. */ + + delegationRewards(request: QueryDelegationRewardsRequest): Promise; + /** + * DelegationTotalRewards queries the total rewards accrued by a each + * validator. + */ + + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise; + /** DelegatorValidators queries the validators of a delegator. */ + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise; + /** DelegatorWithdrawAddress queries withdraw address of a delegator. */ + + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise; + /** CommunityPool queries the community pool coins. 
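A minimal usage sketch for the generated distribution LCD client above; this is editorial annotation, not part of the generated diff. It assumes LCDClient from "@osmonauts/lcd" is constructed with a { restEndpoint } option and that a public REST endpoint exists; the URL is a placeholder.

import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./query.lcd";

async function printOutstandingRewards(validatorAddress: string) {
  // Hypothetical REST endpoint; the constructor shape is assumed from @osmonauts/lcd.
  const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
  const client = new LCDQueryClient({ requestClient });

  // Resolves GET cosmos/distribution/v1beta1/validators/{validatorAddress}/outstanding_rewards
  const res = await client.validatorOutstandingRewards({ validatorAddress });
  console.log(res.rewards);
}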
*/ + + communityPool(request?: QueryCommunityPoolRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.validatorOutstandingRewards = this.validatorOutstandingRewards.bind(this); + this.validatorCommission = this.validatorCommission.bind(this); + this.validatorSlashes = this.validatorSlashes.bind(this); + this.delegationRewards = this.delegationRewards.bind(this); + this.delegationTotalRewards = this.delegationTotalRewards.bind(this); + this.delegatorValidators = this.delegatorValidators.bind(this); + this.delegatorWithdrawAddress = this.delegatorWithdrawAddress.bind(this); + this.communityPool = this.communityPool.bind(this); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise { + const data = QueryValidatorOutstandingRewardsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "ValidatorOutstandingRewards", data); + return promise.then(data => QueryValidatorOutstandingRewardsResponse.decode(new _m0.Reader(data))); + } + + validatorCommission(request: QueryValidatorCommissionRequest): Promise { + const data = QueryValidatorCommissionRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "ValidatorCommission", data); + return promise.then(data => QueryValidatorCommissionResponse.decode(new _m0.Reader(data))); + } + + validatorSlashes(request: QueryValidatorSlashesRequest): Promise { + const data = QueryValidatorSlashesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "ValidatorSlashes", data); + return promise.then(data => QueryValidatorSlashesResponse.decode(new _m0.Reader(data))); + } + + delegationRewards(request: QueryDelegationRewardsRequest): Promise { + const data = QueryDelegationRewardsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "DelegationRewards", data); + return promise.then(data => QueryDelegationRewardsResponse.decode(new _m0.Reader(data))); + } + + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise { + const data = QueryDelegationTotalRewardsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "DelegationTotalRewards", data); + return promise.then(data => QueryDelegationTotalRewardsResponse.decode(new _m0.Reader(data))); + } + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise { + const data = QueryDelegatorValidatorsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "DelegatorValidators", data); + return promise.then(data => QueryDelegatorValidatorsResponse.decode(new _m0.Reader(data))); + } + + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise { + const data = QueryDelegatorWithdrawAddressRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "DelegatorWithdrawAddress", data); + return promise.then(data => QueryDelegatorWithdrawAddressResponse.decode(new 
_m0.Reader(data))); + } + + communityPool(request: QueryCommunityPoolRequest = {}): Promise { + const data = QueryCommunityPoolRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Query", "CommunityPool", data); + return promise.then(data => QueryCommunityPoolResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + validatorOutstandingRewards(request: QueryValidatorOutstandingRewardsRequest): Promise { + return queryService.validatorOutstandingRewards(request); + }, + + validatorCommission(request: QueryValidatorCommissionRequest): Promise { + return queryService.validatorCommission(request); + }, + + validatorSlashes(request: QueryValidatorSlashesRequest): Promise { + return queryService.validatorSlashes(request); + }, + + delegationRewards(request: QueryDelegationRewardsRequest): Promise { + return queryService.delegationRewards(request); + }, + + delegationTotalRewards(request: QueryDelegationTotalRewardsRequest): Promise { + return queryService.delegationTotalRewards(request); + }, + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise { + return queryService.delegatorValidators(request); + }, + + delegatorWithdrawAddress(request: QueryDelegatorWithdrawAddressRequest): Promise { + return queryService.delegatorWithdrawAddress(request); + }, + + communityPool(request?: QueryCommunityPoolRequest): Promise { + return queryService.communityPool(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/query.ts b/packages/codegen/src/cosmos/distribution/v1beta1/query.ts new file mode 100644 index 00000000..a67d26ea --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/query.ts @@ -0,0 +1,1160 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Params, ParamsSDKType, ValidatorOutstandingRewards, ValidatorOutstandingRewardsSDKType, ValidatorAccumulatedCommission, ValidatorAccumulatedCommissionSDKType, ValidatorSlashEvent, ValidatorSlashEventSDKType, DelegationDelegatorReward, DelegationDelegatorRewardSDKType } from "./distribution"; +import { DecCoin, DecCoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryValidatorOutstandingRewardsRequest is the request type for the + * Query/ValidatorOutstandingRewards RPC method. + */ + +export interface QueryValidatorOutstandingRewardsRequest { + /** validator_address defines the validator address to query for. 
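A hedged sketch of wiring the createRpcQueryExtension helper above into a @cosmjs/stargate QueryClient; this is annotation, not generated output. It assumes Tendermint34Client from "@cosmjs/tendermint-rpc" and a reachable RPC endpoint; the URL and addresses are placeholders.

import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./query.rpc.Query";

async function printDelegationRewards(delegatorAddress: string, validatorAddress: string) {
  const tm = await Tendermint34Client.connect("https://rpc.example.com"); // placeholder endpoint
  const base = new QueryClient(tm);
  const distribution = createRpcQueryExtension(base);

  // Calls cosmos.distribution.v1beta1.Query/DelegationRewards over the protobuf RPC client
  const { rewards } = await distribution.delegationRewards({ delegatorAddress, validatorAddress });
  console.log(rewards);
}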
*/ + validatorAddress: string; +} +/** + * QueryValidatorOutstandingRewardsRequest is the request type for the + * Query/ValidatorOutstandingRewards RPC method. + */ + +export interface QueryValidatorOutstandingRewardsRequestSDKType { + validator_address: string; +} +/** + * QueryValidatorOutstandingRewardsResponse is the response type for the + * Query/ValidatorOutstandingRewards RPC method. + */ + +export interface QueryValidatorOutstandingRewardsResponse { + rewards?: ValidatorOutstandingRewards; +} +/** + * QueryValidatorOutstandingRewardsResponse is the response type for the + * Query/ValidatorOutstandingRewards RPC method. + */ + +export interface QueryValidatorOutstandingRewardsResponseSDKType { + rewards?: ValidatorOutstandingRewardsSDKType; +} +/** + * QueryValidatorCommissionRequest is the request type for the + * Query/ValidatorCommission RPC method + */ + +export interface QueryValidatorCommissionRequest { + /** validator_address defines the validator address to query for. */ + validatorAddress: string; +} +/** + * QueryValidatorCommissionRequest is the request type for the + * Query/ValidatorCommission RPC method + */ + +export interface QueryValidatorCommissionRequestSDKType { + validator_address: string; +} +/** + * QueryValidatorCommissionResponse is the response type for the + * Query/ValidatorCommission RPC method + */ + +export interface QueryValidatorCommissionResponse { + /** commission defines the commision the validator received. */ + commission?: ValidatorAccumulatedCommission; +} +/** + * QueryValidatorCommissionResponse is the response type for the + * Query/ValidatorCommission RPC method + */ + +export interface QueryValidatorCommissionResponseSDKType { + commission?: ValidatorAccumulatedCommissionSDKType; +} +/** + * QueryValidatorSlashesRequest is the request type for the + * Query/ValidatorSlashes RPC method + */ + +export interface QueryValidatorSlashesRequest { + /** validator_address defines the validator address to query for. */ + validatorAddress: string; + /** starting_height defines the optional starting height to query the slashes. */ + + startingHeight: Long; + /** starting_height defines the optional ending height to query the slashes. */ + + endingHeight: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryValidatorSlashesRequest is the request type for the + * Query/ValidatorSlashes RPC method + */ + +export interface QueryValidatorSlashesRequestSDKType { + validator_address: string; + starting_height: Long; + ending_height: Long; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorSlashesResponse is the response type for the + * Query/ValidatorSlashes RPC method. + */ + +export interface QueryValidatorSlashesResponse { + /** slashes defines the slashes the validator received. */ + slashes: ValidatorSlashEvent[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryValidatorSlashesResponse is the response type for the + * Query/ValidatorSlashes RPC method. + */ + +export interface QueryValidatorSlashesResponseSDKType { + slashes: ValidatorSlashEventSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegationRewardsRequest is the request type for the + * Query/DelegationRewards RPC method. + */ + +export interface QueryDelegationRewardsRequest { + /** delegator_address defines the delegator address to query for. 
*/ + delegatorAddress: string; + /** validator_address defines the validator address to query for. */ + + validatorAddress: string; +} +/** + * QueryDelegationRewardsRequest is the request type for the + * Query/DelegationRewards RPC method. + */ + +export interface QueryDelegationRewardsRequestSDKType { + delegator_address: string; + validator_address: string; +} +/** + * QueryDelegationRewardsResponse is the response type for the + * Query/DelegationRewards RPC method. + */ + +export interface QueryDelegationRewardsResponse { + /** rewards defines the rewards accrued by a delegation. */ + rewards: DecCoin[]; +} +/** + * QueryDelegationRewardsResponse is the response type for the + * Query/DelegationRewards RPC method. + */ + +export interface QueryDelegationRewardsResponseSDKType { + rewards: DecCoinSDKType[]; +} +/** + * QueryDelegationTotalRewardsRequest is the request type for the + * Query/DelegationTotalRewards RPC method. + */ + +export interface QueryDelegationTotalRewardsRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegationTotalRewardsRequest is the request type for the + * Query/DelegationTotalRewards RPC method. + */ + +export interface QueryDelegationTotalRewardsRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegationTotalRewardsResponse is the response type for the + * Query/DelegationTotalRewards RPC method. + */ + +export interface QueryDelegationTotalRewardsResponse { + /** rewards defines all the rewards accrued by a delegator. */ + rewards: DelegationDelegatorReward[]; + /** total defines the sum of all the rewards. */ + + total: DecCoin[]; +} +/** + * QueryDelegationTotalRewardsResponse is the response type for the + * Query/DelegationTotalRewards RPC method. + */ + +export interface QueryDelegationTotalRewardsResponseSDKType { + rewards: DelegationDelegatorRewardSDKType[]; + total: DecCoinSDKType[]; +} +/** + * QueryDelegatorValidatorsRequest is the request type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegatorValidatorsRequest is the request type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegatorValidatorsResponse is the response type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsResponse { + /** validators defines the validators a delegator is delegating for. */ + validators: string[]; +} +/** + * QueryDelegatorValidatorsResponse is the response type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsResponseSDKType { + validators: string[]; +} +/** + * QueryDelegatorWithdrawAddressRequest is the request type for the + * Query/DelegatorWithdrawAddress RPC method. + */ + +export interface QueryDelegatorWithdrawAddressRequest { + /** delegator_address defines the delegator address to query for. */ + delegatorAddress: string; +} +/** + * QueryDelegatorWithdrawAddressRequest is the request type for the + * Query/DelegatorWithdrawAddress RPC method. + */ + +export interface QueryDelegatorWithdrawAddressRequestSDKType { + delegator_address: string; +} +/** + * QueryDelegatorWithdrawAddressResponse is the response type for the + * Query/DelegatorWithdrawAddress RPC method. 
+ */ + +export interface QueryDelegatorWithdrawAddressResponse { + /** withdraw_address defines the delegator address to query for. */ + withdrawAddress: string; +} +/** + * QueryDelegatorWithdrawAddressResponse is the response type for the + * Query/DelegatorWithdrawAddress RPC method. + */ + +export interface QueryDelegatorWithdrawAddressResponseSDKType { + withdraw_address: string; +} +/** + * QueryCommunityPoolRequest is the request type for the Query/CommunityPool RPC + * method. + */ + +export interface QueryCommunityPoolRequest {} +/** + * QueryCommunityPoolRequest is the request type for the Query/CommunityPool RPC + * method. + */ + +export interface QueryCommunityPoolRequestSDKType {} +/** + * QueryCommunityPoolResponse is the response type for the Query/CommunityPool + * RPC method. + */ + +export interface QueryCommunityPoolResponse { + /** pool defines community pool's coins. */ + pool: DecCoin[]; +} +/** + * QueryCommunityPoolResponse is the response type for the Query/CommunityPool + * RPC method. + */ + +export interface QueryCommunityPoolResponseSDKType { + pool: DecCoinSDKType[]; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? 
Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorOutstandingRewardsRequest(): QueryValidatorOutstandingRewardsRequest { + return { + validatorAddress: "" + }; +} + +export const QueryValidatorOutstandingRewardsRequest = { + encode(message: QueryValidatorOutstandingRewardsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorOutstandingRewardsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorOutstandingRewardsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorOutstandingRewardsRequest { + const message = createBaseQueryValidatorOutstandingRewardsRequest(); + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseQueryValidatorOutstandingRewardsResponse(): QueryValidatorOutstandingRewardsResponse { + return { + rewards: undefined + }; +} + +export const QueryValidatorOutstandingRewardsResponse = { + encode(message: QueryValidatorOutstandingRewardsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rewards !== undefined) { + ValidatorOutstandingRewards.encode(message.rewards, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorOutstandingRewardsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorOutstandingRewardsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rewards = ValidatorOutstandingRewards.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorOutstandingRewardsResponse { + const message = createBaseQueryValidatorOutstandingRewardsResponse(); + message.rewards = object.rewards !== undefined && object.rewards !== null ? ValidatorOutstandingRewards.fromPartial(object.rewards) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorCommissionRequest(): QueryValidatorCommissionRequest { + return { + validatorAddress: "" + }; +} + +export const QueryValidatorCommissionRequest = { + encode(message: QueryValidatorCommissionRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorCommissionRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryValidatorCommissionRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorCommissionRequest { + const message = createBaseQueryValidatorCommissionRequest(); + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseQueryValidatorCommissionResponse(): QueryValidatorCommissionResponse { + return { + commission: undefined + }; +} + +export const QueryValidatorCommissionResponse = { + encode(message: QueryValidatorCommissionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.commission !== undefined) { + ValidatorAccumulatedCommission.encode(message.commission, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorCommissionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorCommissionResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.commission = ValidatorAccumulatedCommission.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorCommissionResponse { + const message = createBaseQueryValidatorCommissionResponse(); + message.commission = object.commission !== undefined && object.commission !== null ? ValidatorAccumulatedCommission.fromPartial(object.commission) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorSlashesRequest(): QueryValidatorSlashesRequest { + return { + validatorAddress: "", + startingHeight: Long.UZERO, + endingHeight: Long.UZERO, + pagination: undefined + }; +} + +export const QueryValidatorSlashesRequest = { + encode(message: QueryValidatorSlashesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + if (!message.startingHeight.isZero()) { + writer.uint32(16).uint64(message.startingHeight); + } + + if (!message.endingHeight.isZero()) { + writer.uint32(24).uint64(message.endingHeight); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorSlashesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryValidatorSlashesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + case 2: + message.startingHeight = (reader.uint64() as Long); + break; + + case 3: + message.endingHeight = (reader.uint64() as Long); + break; + + case 4: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorSlashesRequest { + const message = createBaseQueryValidatorSlashesRequest(); + message.validatorAddress = object.validatorAddress ?? ""; + message.startingHeight = object.startingHeight !== undefined && object.startingHeight !== null ? Long.fromValue(object.startingHeight) : Long.UZERO; + message.endingHeight = object.endingHeight !== undefined && object.endingHeight !== null ? Long.fromValue(object.endingHeight) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorSlashesResponse(): QueryValidatorSlashesResponse { + return { + slashes: [], + pagination: undefined + }; +} + +export const QueryValidatorSlashesResponse = { + encode(message: QueryValidatorSlashesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.slashes) { + ValidatorSlashEvent.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorSlashesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorSlashesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.slashes.push(ValidatorSlashEvent.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorSlashesResponse { + const message = createBaseQueryValidatorSlashesResponse(); + message.slashes = object.slashes?.map(e => ValidatorSlashEvent.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegationRewardsRequest(): QueryDelegationRewardsRequest { + return { + delegatorAddress: "", + validatorAddress: "" + }; +} + +export const QueryDelegationRewardsRequest = { + encode(message: QueryDelegationRewardsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRewardsRequest { + const reader = input instanceof _m0.Reader ? 
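An illustrative round trip through the generated codec objects (annotation only, not generated code): fromPartial fills in defaults, encode writes protobuf bytes, and decode reads them back. Long is re-exported by the generated helpers; the validator address is a placeholder.

import { Long } from "../../../helpers";
import { QueryValidatorSlashesRequest } from "./query";

const req = QueryValidatorSlashesRequest.fromPartial({
  validatorAddress: "cosmosvaloper1...", // placeholder
  startingHeight: Long.fromNumber(100, true), // uint64 fields default to Long.UZERO
  endingHeight: Long.fromNumber(200, true),
});

const bytes = QueryValidatorSlashesRequest.encode(req).finish();
const roundTripped = QueryValidatorSlashesRequest.decode(bytes);
// roundTripped.validatorAddress matches the input; pagination stays undefined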
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationRewardsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationRewardsRequest { + const message = createBaseQueryDelegationRewardsRequest(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseQueryDelegationRewardsResponse(): QueryDelegationRewardsResponse { + return { + rewards: [] + }; +} + +export const QueryDelegationRewardsResponse = { + encode(message: QueryDelegationRewardsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rewards) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRewardsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationRewardsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rewards.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationRewardsResponse { + const message = createBaseQueryDelegationRewardsResponse(); + message.rewards = object.rewards?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseQueryDelegationTotalRewardsRequest(): QueryDelegationTotalRewardsRequest { + return { + delegatorAddress: "" + }; +} + +export const QueryDelegationTotalRewardsRequest = { + encode(message: QueryDelegationTotalRewardsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationTotalRewardsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationTotalRewardsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationTotalRewardsRequest { + const message = createBaseQueryDelegationTotalRewardsRequest(); + message.delegatorAddress = object.delegatorAddress ?? 
""; + return message; + } + +}; + +function createBaseQueryDelegationTotalRewardsResponse(): QueryDelegationTotalRewardsResponse { + return { + rewards: [], + total: [] + }; +} + +export const QueryDelegationTotalRewardsResponse = { + encode(message: QueryDelegationTotalRewardsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rewards) { + DelegationDelegatorReward.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.total) { + DecCoin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationTotalRewardsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationTotalRewardsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rewards.push(DelegationDelegatorReward.decode(reader, reader.uint32())); + break; + + case 2: + message.total.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationTotalRewardsResponse { + const message = createBaseQueryDelegationTotalRewardsResponse(); + message.rewards = object.rewards?.map(e => DelegationDelegatorReward.fromPartial(e)) || []; + message.total = object.total?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorsRequest(): QueryDelegatorValidatorsRequest { + return { + delegatorAddress: "" + }; +} + +export const QueryDelegatorValidatorsRequest = { + encode(message: QueryDelegatorValidatorsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorsRequest { + const message = createBaseQueryDelegatorValidatorsRequest(); + message.delegatorAddress = object.delegatorAddress ?? ""; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorsResponse(): QueryDelegatorValidatorsResponse { + return { + validators: [] + }; +} + +export const QueryDelegatorValidatorsResponse = { + encode(message: QueryDelegatorValidatorsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validators.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorsResponse { + const message = createBaseQueryDelegatorValidatorsResponse(); + message.validators = object.validators?.map(e => e) || []; + return message; + } + +}; + +function createBaseQueryDelegatorWithdrawAddressRequest(): QueryDelegatorWithdrawAddressRequest { + return { + delegatorAddress: "" + }; +} + +export const QueryDelegatorWithdrawAddressRequest = { + encode(message: QueryDelegatorWithdrawAddressRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorWithdrawAddressRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorWithdrawAddressRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorWithdrawAddressRequest { + const message = createBaseQueryDelegatorWithdrawAddressRequest(); + message.delegatorAddress = object.delegatorAddress ?? ""; + return message; + } + +}; + +function createBaseQueryDelegatorWithdrawAddressResponse(): QueryDelegatorWithdrawAddressResponse { + return { + withdrawAddress: "" + }; +} + +export const QueryDelegatorWithdrawAddressResponse = { + encode(message: QueryDelegatorWithdrawAddressResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.withdrawAddress !== "") { + writer.uint32(10).string(message.withdrawAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorWithdrawAddressResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorWithdrawAddressResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.withdrawAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorWithdrawAddressResponse { + const message = createBaseQueryDelegatorWithdrawAddressResponse(); + message.withdrawAddress = object.withdrawAddress ?? ""; + return message; + } + +}; + +function createBaseQueryCommunityPoolRequest(): QueryCommunityPoolRequest { + return {}; +} + +export const QueryCommunityPoolRequest = { + encode(_: QueryCommunityPoolRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCommunityPoolRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryCommunityPoolRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryCommunityPoolRequest { + const message = createBaseQueryCommunityPoolRequest(); + return message; + } + +}; + +function createBaseQueryCommunityPoolResponse(): QueryCommunityPoolResponse { + return { + pool: [] + }; +} + +export const QueryCommunityPoolResponse = { + encode(message: QueryCommunityPoolResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pool) { + DecCoin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCommunityPoolResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCommunityPoolResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pool.push(DecCoin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCommunityPoolResponse { + const message = createBaseQueryCommunityPoolResponse(); + message.pool = object.pool?.map(e => DecCoin.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/distribution/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..64e7e190 --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,66 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSetWithdrawAddress, MsgSetWithdrawAddressResponse, MsgWithdrawDelegatorReward, MsgWithdrawDelegatorRewardResponse, MsgWithdrawValidatorCommission, MsgWithdrawValidatorCommissionResponse, MsgFundCommunityPool, MsgFundCommunityPoolResponse } from "./tx"; +/** Msg defines the distribution Msg service. */ + +export interface Msg { + /** + * SetWithdrawAddress defines a method to change the withdraw address + * for a delegator (or validator self-delegation). + */ + setWithdrawAddress(request: MsgSetWithdrawAddress): Promise; + /** + * WithdrawDelegatorReward defines a method to withdraw rewards of delegator + * from a single validator. + */ + + withdrawDelegatorReward(request: MsgWithdrawDelegatorReward): Promise; + /** + * WithdrawValidatorCommission defines a method to withdraw the + * full commission to the validator address. + */ + + withdrawValidatorCommission(request: MsgWithdrawValidatorCommission): Promise; + /** + * FundCommunityPool defines a method to allow an account to directly + * fund the community pool. 
+ */ + + fundCommunityPool(request: MsgFundCommunityPool): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.setWithdrawAddress = this.setWithdrawAddress.bind(this); + this.withdrawDelegatorReward = this.withdrawDelegatorReward.bind(this); + this.withdrawValidatorCommission = this.withdrawValidatorCommission.bind(this); + this.fundCommunityPool = this.fundCommunityPool.bind(this); + } + + setWithdrawAddress(request: MsgSetWithdrawAddress): Promise { + const data = MsgSetWithdrawAddress.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Msg", "SetWithdrawAddress", data); + return promise.then(data => MsgSetWithdrawAddressResponse.decode(new _m0.Reader(data))); + } + + withdrawDelegatorReward(request: MsgWithdrawDelegatorReward): Promise { + const data = MsgWithdrawDelegatorReward.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Msg", "WithdrawDelegatorReward", data); + return promise.then(data => MsgWithdrawDelegatorRewardResponse.decode(new _m0.Reader(data))); + } + + withdrawValidatorCommission(request: MsgWithdrawValidatorCommission): Promise { + const data = MsgWithdrawValidatorCommission.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Msg", "WithdrawValidatorCommission", data); + return promise.then(data => MsgWithdrawValidatorCommissionResponse.decode(new _m0.Reader(data))); + } + + fundCommunityPool(request: MsgFundCommunityPool): Promise { + const data = MsgFundCommunityPool.encode(request).finish(); + const promise = this.rpc.request("cosmos.distribution.v1beta1.Msg", "FundCommunityPool", data); + return promise.then(data => MsgFundCommunityPoolResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/distribution/v1beta1/tx.ts b/packages/codegen/src/cosmos/distribution/v1beta1/tx.ts new file mode 100644 index 00000000..182b5b5d --- /dev/null +++ b/packages/codegen/src/cosmos/distribution/v1beta1/tx.ts @@ -0,0 +1,473 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgSetWithdrawAddress sets the withdraw address for + * a delegator (or validator self-delegation). + */ + +export interface MsgSetWithdrawAddress { + delegatorAddress: string; + withdrawAddress: string; +} +/** + * MsgSetWithdrawAddress sets the withdraw address for + * a delegator (or validator self-delegation). + */ + +export interface MsgSetWithdrawAddressSDKType { + delegator_address: string; + withdraw_address: string; +} +/** MsgSetWithdrawAddressResponse defines the Msg/SetWithdrawAddress response type. */ + +export interface MsgSetWithdrawAddressResponse {} +/** MsgSetWithdrawAddressResponse defines the Msg/SetWithdrawAddress response type. */ + +export interface MsgSetWithdrawAddressResponseSDKType {} +/** + * MsgWithdrawDelegatorReward represents delegation withdrawal to a delegator + * from a single validator. + */ + +export interface MsgWithdrawDelegatorReward { + delegatorAddress: string; + validatorAddress: string; +} +/** + * MsgWithdrawDelegatorReward represents delegation withdrawal to a delegator + * from a single validator. 
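A sketch of using the generated Msg client above; this is annotation, not part of the diff. MsgClientImpl only needs an object satisfying the Rpc interface from the generated helpers (a request(service, method, data) function returning bytes). The transport below is a stub you would replace with a real gRPC or broadcast path, and the addresses are placeholders.

import { Rpc } from "../../../helpers";
import { MsgClientImpl } from "./tx.rpc.msg";
import { MsgWithdrawDelegatorReward } from "./tx";

const rpc: Rpc = {
  request: async (_service, _method, _data) => {
    // Replace with a real transport (gRPC, gRPC-web, or a signing/broadcast pipeline).
    throw new Error("transport not wired up");
  },
};

const msgs = new MsgClientImpl(rpc);

async function withdraw(delegatorAddress: string, validatorAddress: string) {
  const res = await msgs.withdrawDelegatorReward(
    MsgWithdrawDelegatorReward.fromPartial({ delegatorAddress, validatorAddress })
  );
  console.log(res.amount);
}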
+ */ + +export interface MsgWithdrawDelegatorRewardSDKType { + delegator_address: string; + validator_address: string; +} +/** MsgWithdrawDelegatorRewardResponse defines the Msg/WithdrawDelegatorReward response type. */ + +export interface MsgWithdrawDelegatorRewardResponse { + amount: Coin[]; +} +/** MsgWithdrawDelegatorRewardResponse defines the Msg/WithdrawDelegatorReward response type. */ + +export interface MsgWithdrawDelegatorRewardResponseSDKType { + amount: CoinSDKType[]; +} +/** + * MsgWithdrawValidatorCommission withdraws the full commission to the validator + * address. + */ + +export interface MsgWithdrawValidatorCommission { + validatorAddress: string; +} +/** + * MsgWithdrawValidatorCommission withdraws the full commission to the validator + * address. + */ + +export interface MsgWithdrawValidatorCommissionSDKType { + validator_address: string; +} +/** MsgWithdrawValidatorCommissionResponse defines the Msg/WithdrawValidatorCommission response type. */ + +export interface MsgWithdrawValidatorCommissionResponse { + amount: Coin[]; +} +/** MsgWithdrawValidatorCommissionResponse defines the Msg/WithdrawValidatorCommission response type. */ + +export interface MsgWithdrawValidatorCommissionResponseSDKType { + amount: CoinSDKType[]; +} +/** + * MsgFundCommunityPool allows an account to directly + * fund the community pool. + */ + +export interface MsgFundCommunityPool { + amount: Coin[]; + depositor: string; +} +/** + * MsgFundCommunityPool allows an account to directly + * fund the community pool. + */ + +export interface MsgFundCommunityPoolSDKType { + amount: CoinSDKType[]; + depositor: string; +} +/** MsgFundCommunityPoolResponse defines the Msg/FundCommunityPool response type. */ + +export interface MsgFundCommunityPoolResponse {} +/** MsgFundCommunityPoolResponse defines the Msg/FundCommunityPool response type. */ + +export interface MsgFundCommunityPoolResponseSDKType {} + +function createBaseMsgSetWithdrawAddress(): MsgSetWithdrawAddress { + return { + delegatorAddress: "", + withdrawAddress: "" + }; +} + +export const MsgSetWithdrawAddress = { + encode(message: MsgSetWithdrawAddress, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.withdrawAddress !== "") { + writer.uint32(18).string(message.withdrawAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetWithdrawAddress { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetWithdrawAddress(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.withdrawAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSetWithdrawAddress { + const message = createBaseMsgSetWithdrawAddress(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.withdrawAddress = object.withdrawAddress ?? 
""; + return message; + } + +}; + +function createBaseMsgSetWithdrawAddressResponse(): MsgSetWithdrawAddressResponse { + return {}; +} + +export const MsgSetWithdrawAddressResponse = { + encode(_: MsgSetWithdrawAddressResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSetWithdrawAddressResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSetWithdrawAddressResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSetWithdrawAddressResponse { + const message = createBaseMsgSetWithdrawAddressResponse(); + return message; + } + +}; + +function createBaseMsgWithdrawDelegatorReward(): MsgWithdrawDelegatorReward { + return { + delegatorAddress: "", + validatorAddress: "" + }; +} + +export const MsgWithdrawDelegatorReward = { + encode(message: MsgWithdrawDelegatorReward, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawDelegatorReward { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWithdrawDelegatorReward(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawDelegatorReward { + const message = createBaseMsgWithdrawDelegatorReward(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseMsgWithdrawDelegatorRewardResponse(): MsgWithdrawDelegatorRewardResponse { + return { + amount: [] + }; +} + +export const MsgWithdrawDelegatorRewardResponse = { + encode(message: MsgWithdrawDelegatorRewardResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawDelegatorRewardResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgWithdrawDelegatorRewardResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawDelegatorRewardResponse { + const message = createBaseMsgWithdrawDelegatorRewardResponse(); + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgWithdrawValidatorCommission(): MsgWithdrawValidatorCommission { + return { + validatorAddress: "" + }; +} + +export const MsgWithdrawValidatorCommission = { + encode(message: MsgWithdrawValidatorCommission, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddress !== "") { + writer.uint32(10).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawValidatorCommission { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWithdrawValidatorCommission(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawValidatorCommission { + const message = createBaseMsgWithdrawValidatorCommission(); + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseMsgWithdrawValidatorCommissionResponse(): MsgWithdrawValidatorCommissionResponse { + return { + amount: [] + }; +} + +export const MsgWithdrawValidatorCommissionResponse = { + encode(message: MsgWithdrawValidatorCommissionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawValidatorCommissionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgWithdrawValidatorCommissionResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawValidatorCommissionResponse { + const message = createBaseMsgWithdrawValidatorCommissionResponse(); + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgFundCommunityPool(): MsgFundCommunityPool { + return { + amount: [], + depositor: "" + }; +} + +export const MsgFundCommunityPool = { + encode(message: MsgFundCommunityPool, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgFundCommunityPool { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgFundCommunityPool(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.depositor = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgFundCommunityPool { + const message = createBaseMsgFundCommunityPool(); + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + message.depositor = object.depositor ?? ""; + return message; + } + +}; + +function createBaseMsgFundCommunityPoolResponse(): MsgFundCommunityPoolResponse { + return {}; +} + +export const MsgFundCommunityPoolResponse = { + encode(_: MsgFundCommunityPoolResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgFundCommunityPoolResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgFundCommunityPoolResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgFundCommunityPoolResponse { + const message = createBaseMsgFundCommunityPoolResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/evidence.ts b/packages/codegen/src/cosmos/evidence/v1beta1/evidence.ts new file mode 100644 index 00000000..7e804c5d --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/evidence.ts @@ -0,0 +1,100 @@ +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Long, toTimestamp, fromTimestamp, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * Equivocation implements the Evidence interface and defines evidence of double + * signing misbehavior. 
+ */ + +export interface Equivocation { + height: Long; + time?: Date; + power: Long; + consensusAddress: string; +} +/** + * Equivocation implements the Evidence interface and defines evidence of double + * signing misbehavior. + */ + +export interface EquivocationSDKType { + height: Long; + time?: Date; + power: Long; + consensus_address: string; +} + +function createBaseEquivocation(): Equivocation { + return { + height: Long.ZERO, + time: undefined, + power: Long.ZERO, + consensusAddress: "" + }; +} + +export const Equivocation = { + encode(message: Equivocation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(18).fork()).ldelim(); + } + + if (!message.power.isZero()) { + writer.uint32(24).int64(message.power); + } + + if (message.consensusAddress !== "") { + writer.uint32(34).string(message.consensusAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Equivocation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEquivocation(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 3: + message.power = (reader.int64() as Long); + break; + + case 4: + message.consensusAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Equivocation { + const message = createBaseEquivocation(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.time = object.time ?? undefined; + message.power = object.power !== undefined && object.power !== null ? Long.fromValue(object.power) : Long.ZERO; + message.consensusAddress = object.consensusAddress ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/genesis.ts b/packages/codegen/src/cosmos/evidence/v1beta1/genesis.ts new file mode 100644 index 00000000..9409525a --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/genesis.ts @@ -0,0 +1,59 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the evidence module's genesis state. */ + +export interface GenesisState { + /** evidence defines all the evidence at genesis. */ + evidence: Any[]; +} +/** GenesisState defines the evidence module's genesis state. */ + +export interface GenesisStateSDKType { + evidence: AnySDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + evidence: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.evidence) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.evidence.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.evidence = object.evidence?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/evidence/v1beta1/query.lcd.ts new file mode 100644 index 00000000..ff67beab --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/query.lcd.ts @@ -0,0 +1,41 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryEvidenceRequest, QueryEvidenceResponseSDKType, QueryAllEvidenceRequest, QueryAllEvidenceResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.evidence = this.evidence.bind(this); + this.allEvidence = this.allEvidence.bind(this); + } + /* Evidence queries evidence based on evidence hash. */ + + + async evidence(params: QueryEvidenceRequest): Promise { + const endpoint = `cosmos/evidence/v1beta1/evidence/${params.evidenceHash}`; + return await this.req.get(endpoint); + } + /* AllEvidence queries all evidence. */ + + + async allEvidence(params: QueryAllEvidenceRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/evidence/v1beta1/evidence`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/evidence/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..f55807bd --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/query.rpc.Query.ts @@ -0,0 +1,51 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryEvidenceRequest, QueryEvidenceResponse, QueryAllEvidenceRequest, QueryAllEvidenceResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Evidence queries evidence based on evidence hash. */ + evidence(request: QueryEvidenceRequest): Promise; + /** AllEvidence queries all evidence. 
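+ *
+ * A hedged usage sketch (assumes a connected @cosmjs/stargate QueryClient named
+ * queryClient; the variable names are illustrative, not part of this file):
+ *   const ext = createRpcQueryExtension(queryClient);
+ *   const { evidence, pagination } = await ext.allEvidence();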
*/ + + allEvidence(request?: QueryAllEvidenceRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.evidence = this.evidence.bind(this); + this.allEvidence = this.allEvidence.bind(this); + } + + evidence(request: QueryEvidenceRequest): Promise { + const data = QueryEvidenceRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.evidence.v1beta1.Query", "Evidence", data); + return promise.then(data => QueryEvidenceResponse.decode(new _m0.Reader(data))); + } + + allEvidence(request: QueryAllEvidenceRequest = { + pagination: undefined + }): Promise { + const data = QueryAllEvidenceRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.evidence.v1beta1.Query", "AllEvidence", data); + return promise.then(data => QueryAllEvidenceResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + evidence(request: QueryEvidenceRequest): Promise { + return queryService.evidence(request); + }, + + allEvidence(request?: QueryAllEvidenceRequest): Promise { + return queryService.allEvidence(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/query.ts b/packages/codegen/src/cosmos/evidence/v1beta1/query.ts new file mode 100644 index 00000000..e0b9ccae --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/query.ts @@ -0,0 +1,254 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryEvidenceRequest is the request type for the Query/Evidence RPC method. */ + +export interface QueryEvidenceRequest { + /** evidence_hash defines the hash of the requested evidence. */ + evidenceHash: Uint8Array; +} +/** QueryEvidenceRequest is the request type for the Query/Evidence RPC method. */ + +export interface QueryEvidenceRequestSDKType { + evidence_hash: Uint8Array; +} +/** QueryEvidenceResponse is the response type for the Query/Evidence RPC method. */ + +export interface QueryEvidenceResponse { + /** evidence returns the requested evidence. */ + evidence?: Any; +} +/** QueryEvidenceResponse is the response type for the Query/Evidence RPC method. */ + +export interface QueryEvidenceResponseSDKType { + evidence?: AnySDKType; +} +/** + * QueryEvidenceRequest is the request type for the Query/AllEvidence RPC + * method. + */ + +export interface QueryAllEvidenceRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryEvidenceRequest is the request type for the Query/AllEvidence RPC + * method. + */ + +export interface QueryAllEvidenceRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC + * method. + */ + +export interface QueryAllEvidenceResponse { + /** evidence returns all evidences. */ + evidence: Any[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC + * method. 
+ */ + +export interface QueryAllEvidenceResponseSDKType { + evidence: AnySDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryEvidenceRequest(): QueryEvidenceRequest { + return { + evidenceHash: new Uint8Array() + }; +} + +export const QueryEvidenceRequest = { + encode(message: QueryEvidenceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.evidenceHash.length !== 0) { + writer.uint32(10).bytes(message.evidenceHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryEvidenceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryEvidenceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.evidenceHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryEvidenceRequest { + const message = createBaseQueryEvidenceRequest(); + message.evidenceHash = object.evidenceHash ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryEvidenceResponse(): QueryEvidenceResponse { + return { + evidence: undefined + }; +} + +export const QueryEvidenceResponse = { + encode(message: QueryEvidenceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.evidence !== undefined) { + Any.encode(message.evidence, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryEvidenceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryEvidenceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.evidence = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryEvidenceResponse { + const message = createBaseQueryEvidenceResponse(); + message.evidence = object.evidence !== undefined && object.evidence !== null ? Any.fromPartial(object.evidence) : undefined; + return message; + } + +}; + +function createBaseQueryAllEvidenceRequest(): QueryAllEvidenceRequest { + return { + pagination: undefined + }; +} + +export const QueryAllEvidenceRequest = { + encode(message: QueryAllEvidenceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllEvidenceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllEvidenceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllEvidenceRequest { + const message = createBaseQueryAllEvidenceRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllEvidenceResponse(): QueryAllEvidenceResponse { + return { + evidence: [], + pagination: undefined + }; +} + +export const QueryAllEvidenceResponse = { + encode(message: QueryAllEvidenceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.evidence) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllEvidenceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllEvidenceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.evidence.push(Any.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllEvidenceResponse { + const message = createBaseQueryAllEvidenceResponse(); + message.evidence = object.evidence?.map(e => Any.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/evidence/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..afd22359 --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,27 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSubmitEvidence, MsgSubmitEvidenceResponse } from "./tx"; +/** Msg defines the evidence Msg service. */ + +export interface Msg { + /** + * SubmitEvidence submits an arbitrary Evidence of misbehavior such as equivocation or + * counterfactual signing. 
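+ *
+ * A hedged usage sketch (assumes an Rpc implementation named rpc, see
+ * ../../../helpers; the names and addresses are illustrative, not part of this file):
+ *   const client = new MsgClientImpl(rpc);
+ *   const res = await client.submitEvidence(MsgSubmitEvidence.fromPartial({
+ *     submitter: "cosmos1...",  // placeholder address
+ *     evidence: undefined       // an Any-packed evidence message in practice
+ *   }));
+ *   // res.hash is the hash of the submitted evidence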
+ */ + submitEvidence(request: MsgSubmitEvidence): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.submitEvidence = this.submitEvidence.bind(this); + } + + submitEvidence(request: MsgSubmitEvidence): Promise { + const data = MsgSubmitEvidence.encode(request).finish(); + const promise = this.rpc.request("cosmos.evidence.v1beta1.Msg", "SubmitEvidence", data); + return promise.then(data => MsgSubmitEvidenceResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/evidence/v1beta1/tx.ts b/packages/codegen/src/cosmos/evidence/v1beta1/tx.ts new file mode 100644 index 00000000..91e09764 --- /dev/null +++ b/packages/codegen/src/cosmos/evidence/v1beta1/tx.ts @@ -0,0 +1,132 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgSubmitEvidence represents a message that supports submitting arbitrary + * Evidence of misbehavior such as equivocation or counterfactual signing. + */ + +export interface MsgSubmitEvidence { + submitter: string; + evidence?: Any; +} +/** + * MsgSubmitEvidence represents a message that supports submitting arbitrary + * Evidence of misbehavior such as equivocation or counterfactual signing. + */ + +export interface MsgSubmitEvidenceSDKType { + submitter: string; + evidence?: AnySDKType; +} +/** MsgSubmitEvidenceResponse defines the Msg/SubmitEvidence response type. */ + +export interface MsgSubmitEvidenceResponse { + /** hash defines the hash of the evidence. */ + hash: Uint8Array; +} +/** MsgSubmitEvidenceResponse defines the Msg/SubmitEvidence response type. */ + +export interface MsgSubmitEvidenceResponseSDKType { + hash: Uint8Array; +} + +function createBaseMsgSubmitEvidence(): MsgSubmitEvidence { + return { + submitter: "", + evidence: undefined + }; +} + +export const MsgSubmitEvidence = { + encode(message: MsgSubmitEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.submitter !== "") { + writer.uint32(10).string(message.submitter); + } + + if (message.evidence !== undefined) { + Any.encode(message.evidence, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitEvidence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.submitter = reader.string(); + break; + + case 2: + message.evidence = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitEvidence { + const message = createBaseMsgSubmitEvidence(); + message.submitter = object.submitter ?? ""; + message.evidence = object.evidence !== undefined && object.evidence !== null ? 
Any.fromPartial(object.evidence) : undefined; + return message; + } + +}; + +function createBaseMsgSubmitEvidenceResponse(): MsgSubmitEvidenceResponse { + return { + hash: new Uint8Array() + }; +} + +export const MsgSubmitEvidenceResponse = { + encode(message: MsgSubmitEvidenceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitEvidenceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitEvidenceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 4: + message.hash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitEvidenceResponse { + const message = createBaseMsgSubmitEvidenceResponse(); + message.hash = object.hash ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/feegrant.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/feegrant.ts new file mode 100644 index 00000000..1b41f358 --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/feegrant.ts @@ -0,0 +1,368 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { toTimestamp, fromTimestamp, DeepPartial } from "../../../helpers"; +/** + * BasicAllowance implements Allowance with a one-time grant of tokens + * that optionally expires. The grantee can use up to SpendLimit to cover fees. + */ + +export interface BasicAllowance { + /** + * spend_limit specifies the maximum amount of tokens that can be spent + * by this allowance and will be updated as tokens are spent. If it is + * empty, there is no spend limit and any amount of coins can be spent. + */ + spendLimit: Coin[]; + /** expiration specifies an optional time when this allowance expires */ + + expiration?: Date; +} +/** + * BasicAllowance implements Allowance with a one-time grant of tokens + * that optionally expires. The grantee can use up to SpendLimit to cover fees. + */ + +export interface BasicAllowanceSDKType { + spend_limit: CoinSDKType[]; + expiration?: Date; +} +/** + * PeriodicAllowance extends Allowance to allow for both a maximum cap, + * as well as a limit per time period. 
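+ *
+ * A hedged example shape, values illustrative only (the denom and amounts are
+ * placeholders, not part of the generated output):
+ *   PeriodicAllowance.fromPartial({
+ *     basic: { spendLimit: [{ denom: "uatom", amount: "1000000" }] },
+ *     periodSpendLimit: [{ denom: "uatom", amount: "100000" }]
+ *   })
+ * Fields left out (period, periodCanSpend, periodReset) default to undefined or an
+ * empty array in the fromPartial helper below.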
+ */ + +export interface PeriodicAllowance { + /** basic specifies a struct of `BasicAllowance` */ + basic?: BasicAllowance; + /** + * period specifies the time duration in which period_spend_limit coins can + * be spent before that allowance is reset + */ + + period?: Duration; + /** + * period_spend_limit specifies the maximum number of coins that can be spent + * in the period + */ + + periodSpendLimit: Coin[]; + /** period_can_spend is the number of coins left to be spent before the period_reset time */ + + periodCanSpend: Coin[]; + /** + * period_reset is the time at which this period resets and a new one begins, + * it is calculated from the start time of the first transaction after the + * last period ended + */ + + periodReset?: Date; +} +/** + * PeriodicAllowance extends Allowance to allow for both a maximum cap, + * as well as a limit per time period. + */ + +export interface PeriodicAllowanceSDKType { + basic?: BasicAllowanceSDKType; + period?: DurationSDKType; + period_spend_limit: CoinSDKType[]; + period_can_spend: CoinSDKType[]; + period_reset?: Date; +} +/** AllowedMsgAllowance creates allowance only for specified message types. */ + +export interface AllowedMsgAllowance { + /** allowance can be any of basic and periodic fee allowance. */ + allowance?: Any; + /** allowed_messages are the messages for which the grantee has the access. */ + + allowedMessages: string[]; +} +/** AllowedMsgAllowance creates allowance only for specified message types. */ + +export interface AllowedMsgAllowanceSDKType { + allowance?: AnySDKType; + allowed_messages: string[]; +} +/** Grant is stored in the KVStore to record a grant with full context */ + +export interface Grant { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + + grantee: string; + /** allowance can be any of basic, periodic, allowed fee allowance. */ + + allowance?: Any; +} +/** Grant is stored in the KVStore to record a grant with full context */ + +export interface GrantSDKType { + granter: string; + grantee: string; + allowance?: AnySDKType; +} + +function createBaseBasicAllowance(): BasicAllowance { + return { + spendLimit: [], + expiration: undefined + }; +} + +export const BasicAllowance = { + encode(message: BasicAllowance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.spendLimit) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.expiration !== undefined) { + Timestamp.encode(toTimestamp(message.expiration), writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BasicAllowance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBasicAllowance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.spendLimit.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.expiration = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BasicAllowance { + const message = createBaseBasicAllowance(); + message.spendLimit = object.spendLimit?.map(e => Coin.fromPartial(e)) || []; + message.expiration = object.expiration ?? 
undefined; + return message; + } + +}; + +function createBasePeriodicAllowance(): PeriodicAllowance { + return { + basic: undefined, + period: undefined, + periodSpendLimit: [], + periodCanSpend: [], + periodReset: undefined + }; +} + +export const PeriodicAllowance = { + encode(message: PeriodicAllowance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.basic !== undefined) { + BasicAllowance.encode(message.basic, writer.uint32(10).fork()).ldelim(); + } + + if (message.period !== undefined) { + Duration.encode(message.period, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.periodSpendLimit) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.periodCanSpend) { + Coin.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.periodReset !== undefined) { + Timestamp.encode(toTimestamp(message.periodReset), writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeriodicAllowance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeriodicAllowance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.basic = BasicAllowance.decode(reader, reader.uint32()); + break; + + case 2: + message.period = Duration.decode(reader, reader.uint32()); + break; + + case 3: + message.periodSpendLimit.push(Coin.decode(reader, reader.uint32())); + break; + + case 4: + message.periodCanSpend.push(Coin.decode(reader, reader.uint32())); + break; + + case 5: + message.periodReset = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PeriodicAllowance { + const message = createBasePeriodicAllowance(); + message.basic = object.basic !== undefined && object.basic !== null ? BasicAllowance.fromPartial(object.basic) : undefined; + message.period = object.period !== undefined && object.period !== null ? Duration.fromPartial(object.period) : undefined; + message.periodSpendLimit = object.periodSpendLimit?.map(e => Coin.fromPartial(e)) || []; + message.periodCanSpend = object.periodCanSpend?.map(e => Coin.fromPartial(e)) || []; + message.periodReset = object.periodReset ?? undefined; + return message; + } + +}; + +function createBaseAllowedMsgAllowance(): AllowedMsgAllowance { + return { + allowance: undefined, + allowedMessages: [] + }; +} + +export const AllowedMsgAllowance = { + encode(message: AllowedMsgAllowance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowance !== undefined) { + Any.encode(message.allowance, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.allowedMessages) { + writer.uint32(18).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AllowedMsgAllowance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAllowedMsgAllowance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowance = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.allowedMessages.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AllowedMsgAllowance { + const message = createBaseAllowedMsgAllowance(); + message.allowance = object.allowance !== undefined && object.allowance !== null ? Any.fromPartial(object.allowance) : undefined; + message.allowedMessages = object.allowedMessages?.map(e => e) || []; + return message; + } + +}; + +function createBaseGrant(): Grant { + return { + granter: "", + grantee: "", + allowance: undefined + }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.allowance !== undefined) { + Any.encode(message.allowance, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGrant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.allowance = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Grant { + const message = createBaseGrant(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.allowance = object.allowance !== undefined && object.allowance !== null ? Any.fromPartial(object.allowance) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/genesis.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/genesis.ts new file mode 100644 index 00000000..2a4eecde --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/genesis.ts @@ -0,0 +1,58 @@ +import { Grant, GrantSDKType } from "./feegrant"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState contains a set of fee allowances, persisted from the store */ + +export interface GenesisState { + allowances: Grant[]; +} +/** GenesisState contains a set of fee allowances, persisted from the store */ + +export interface GenesisStateSDKType { + allowances: GrantSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + allowances: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.allowances) { + Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowances.push(Grant.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.allowances = object.allowances?.map(e => Grant.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/query.lcd.ts new file mode 100644 index 00000000..c486edb7 --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/query.lcd.ts @@ -0,0 +1,56 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryAllowanceRequest, QueryAllowanceResponseSDKType, QueryAllowancesRequest, QueryAllowancesResponseSDKType, QueryAllowancesByGranterRequest, QueryAllowancesByGranterResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.allowance = this.allowance.bind(this); + this.allowances = this.allowances.bind(this); + this.allowancesByGranter = this.allowancesByGranter.bind(this); + } + /* Allowance returns fee granted to the grantee by the granter. */ + + + async allowance(params: QueryAllowanceRequest): Promise { + const endpoint = `cosmos/feegrant/v1beta1/allowance/${params.granter}/${params.grantee}`; + return await this.req.get(endpoint); + } + /* Allowances returns all the grants for address. */ + + + async allowances(params: QueryAllowancesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/feegrant/v1beta1/allowances/${params.grantee}`; + return await this.req.get(endpoint, options); + } + /* AllowancesByGranter returns all the grants given by an address + Since v0.46 */ + + + async allowancesByGranter(params: QueryAllowancesByGranterRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/feegrant/v1beta1/issued/${params.granter}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..a23808dc --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/query.rpc.Query.ts @@ -0,0 +1,66 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryAllowanceRequest, QueryAllowanceResponse, QueryAllowancesRequest, QueryAllowancesResponse, QueryAllowancesByGranterRequest, QueryAllowancesByGranterResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Allowance returns fee granted to the grantee by the granter. */ + allowance(request: QueryAllowanceRequest): Promise; + /** Allowances returns all the grants for address. 
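+ *
+ * A hedged usage sketch (assumes an Rpc implementation named rpc, or use the
+ * createRpcQueryExtension helper at the bottom of this file; names and the address
+ * are illustrative, not part of this file):
+ *   const client = new QueryClientImpl(rpc);
+ *   const { allowances } = await client.allowances(
+ *     QueryAllowancesRequest.fromPartial({ grantee: "cosmos1..." }) // placeholder address
+ *   );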
*/ + + allowances(request: QueryAllowancesRequest): Promise; + /** + * AllowancesByGranter returns all the grants given by an address + * Since v0.46 + */ + + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.allowance = this.allowance.bind(this); + this.allowances = this.allowances.bind(this); + this.allowancesByGranter = this.allowancesByGranter.bind(this); + } + + allowance(request: QueryAllowanceRequest): Promise { + const data = QueryAllowanceRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.feegrant.v1beta1.Query", "Allowance", data); + return promise.then(data => QueryAllowanceResponse.decode(new _m0.Reader(data))); + } + + allowances(request: QueryAllowancesRequest): Promise { + const data = QueryAllowancesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.feegrant.v1beta1.Query", "Allowances", data); + return promise.then(data => QueryAllowancesResponse.decode(new _m0.Reader(data))); + } + + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise { + const data = QueryAllowancesByGranterRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.feegrant.v1beta1.Query", "AllowancesByGranter", data); + return promise.then(data => QueryAllowancesByGranterResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + allowance(request: QueryAllowanceRequest): Promise { + return queryService.allowance(request); + }, + + allowances(request: QueryAllowancesRequest): Promise { + return queryService.allowances(request); + }, + + allowancesByGranter(request: QueryAllowancesByGranterRequest): Promise { + return queryService.allowancesByGranter(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/query.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/query.ts new file mode 100644 index 00000000..8566eec8 --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/query.ts @@ -0,0 +1,408 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Grant, GrantSDKType } from "./feegrant"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryAllowanceRequest is the request type for the Query/Allowance RPC method. */ + +export interface QueryAllowanceRequest { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + + grantee: string; +} +/** QueryAllowanceRequest is the request type for the Query/Allowance RPC method. */ + +export interface QueryAllowanceRequestSDKType { + granter: string; + grantee: string; +} +/** QueryAllowanceResponse is the response type for the Query/Allowance RPC method. */ + +export interface QueryAllowanceResponse { + /** allowance is a allowance granted for grantee by granter. */ + allowance?: Grant; +} +/** QueryAllowanceResponse is the response type for the Query/Allowance RPC method. 
*/ + +export interface QueryAllowanceResponseSDKType { + allowance?: GrantSDKType; +} +/** QueryAllowancesRequest is the request type for the Query/Allowances RPC method. */ + +export interface QueryAllowancesRequest { + grantee: string; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryAllowancesRequest is the request type for the Query/Allowances RPC method. */ + +export interface QueryAllowancesRequestSDKType { + grantee: string; + pagination?: PageRequestSDKType; +} +/** QueryAllowancesResponse is the response type for the Query/Allowances RPC method. */ + +export interface QueryAllowancesResponse { + /** allowances are allowance's granted for grantee by granter. */ + allowances: Grant[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** QueryAllowancesResponse is the response type for the Query/Allowances RPC method. */ + +export interface QueryAllowancesResponseSDKType { + allowances: GrantSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryAllowancesByGranterRequest is the request type for the Query/AllowancesByGranter RPC method. */ + +export interface QueryAllowancesByGranterRequest { + granter: string; + /** pagination defines an pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryAllowancesByGranterRequest is the request type for the Query/AllowancesByGranter RPC method. */ + +export interface QueryAllowancesByGranterRequestSDKType { + granter: string; + pagination?: PageRequestSDKType; +} +/** QueryAllowancesByGranterResponse is the response type for the Query/AllowancesByGranter RPC method. */ + +export interface QueryAllowancesByGranterResponse { + /** allowances that have been issued by the granter. */ + allowances: Grant[]; + /** pagination defines an pagination for the response. */ + + pagination?: PageResponse; +} +/** QueryAllowancesByGranterResponse is the response type for the Query/AllowancesByGranter RPC method. */ + +export interface QueryAllowancesByGranterResponseSDKType { + allowances: GrantSDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryAllowanceRequest(): QueryAllowanceRequest { + return { + granter: "", + grantee: "" + }; +} + +export const QueryAllowanceRequest = { + encode(message: QueryAllowanceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowanceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllowanceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowanceRequest { + const message = createBaseQueryAllowanceRequest(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? 
""; + return message; + } + +}; + +function createBaseQueryAllowanceResponse(): QueryAllowanceResponse { + return { + allowance: undefined + }; +} + +export const QueryAllowanceResponse = { + encode(message: QueryAllowanceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowance !== undefined) { + Grant.encode(message.allowance, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowanceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllowanceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowance = Grant.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowanceResponse { + const message = createBaseQueryAllowanceResponse(); + message.allowance = object.allowance !== undefined && object.allowance !== null ? Grant.fromPartial(object.allowance) : undefined; + return message; + } + +}; + +function createBaseQueryAllowancesRequest(): QueryAllowancesRequest { + return { + grantee: "", + pagination: undefined + }; +} + +export const QueryAllowancesRequest = { + encode(message: QueryAllowancesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.grantee !== "") { + writer.uint32(10).string(message.grantee); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllowancesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grantee = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowancesRequest { + const message = createBaseQueryAllowancesRequest(); + message.grantee = object.grantee ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllowancesResponse(): QueryAllowancesResponse { + return { + allowances: [], + pagination: undefined + }; +} + +export const QueryAllowancesResponse = { + encode(message: QueryAllowancesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.allowances) { + Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllowancesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowances.push(Grant.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowancesResponse { + const message = createBaseQueryAllowancesResponse(); + message.allowances = object.allowances?.map(e => Grant.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllowancesByGranterRequest(): QueryAllowancesByGranterRequest { + return { + granter: "", + pagination: undefined + }; +} + +export const QueryAllowancesByGranterRequest = { + encode(message: QueryAllowancesByGranterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesByGranterRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllowancesByGranterRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowancesByGranterRequest { + const message = createBaseQueryAllowancesByGranterRequest(); + message.granter = object.granter ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllowancesByGranterResponse(): QueryAllowancesByGranterResponse { + return { + allowances: [], + pagination: undefined + }; +} + +export const QueryAllowancesByGranterResponse = { + encode(message: QueryAllowancesByGranterResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.allowances) { + Grant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllowancesByGranterResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllowancesByGranterResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowances.push(Grant.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllowancesByGranterResponse { + const message = createBaseQueryAllowancesByGranterResponse(); + message.allowances = object.allowances?.map(e => Grant.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..1bc315e3 --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,40 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgGrantAllowance, MsgGrantAllowanceResponse, MsgRevokeAllowance, MsgRevokeAllowanceResponse } from "./tx"; +/** Msg defines the feegrant msg service. */ + +export interface Msg { + /** + * GrantAllowance grants fee allowance to the grantee on the granter's + * account with the provided expiration time. + */ + grantAllowance(request: MsgGrantAllowance): Promise; + /** + * RevokeAllowance revokes any fee allowance of granter's account that + * has been granted to the grantee. + */ + + revokeAllowance(request: MsgRevokeAllowance): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.grantAllowance = this.grantAllowance.bind(this); + this.revokeAllowance = this.revokeAllowance.bind(this); + } + + grantAllowance(request: MsgGrantAllowance): Promise { + const data = MsgGrantAllowance.encode(request).finish(); + const promise = this.rpc.request("cosmos.feegrant.v1beta1.Msg", "GrantAllowance", data); + return promise.then(data => MsgGrantAllowanceResponse.decode(new _m0.Reader(data))); + } + + revokeAllowance(request: MsgRevokeAllowance): Promise { + const data = MsgRevokeAllowance.encode(request).finish(); + const promise = this.rpc.request("cosmos.feegrant.v1beta1.Msg", "RevokeAllowance", data); + return promise.then(data => MsgRevokeAllowanceResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/feegrant/v1beta1/tx.ts b/packages/codegen/src/cosmos/feegrant/v1beta1/tx.ts new file mode 100644 index 00000000..36b1e000 --- /dev/null +++ b/packages/codegen/src/cosmos/feegrant/v1beta1/tx.ts @@ -0,0 +1,243 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgGrantAllowance adds permission for Grantee to spend up to Allowance + * of fees from the account of Granter. + */ + +export interface MsgGrantAllowance { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + + grantee: string; + /** allowance can be any of basic, periodic, allowed fee allowance. 
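+ *
+ * A hedged sketch of filling this field: the allowance is packed as a protobuf Any.
+ * BasicAllowance lives in ./feegrant and is not imported by this file; the type URL
+ * shown is the conventional one for it, and the coin values are placeholders:
+ *   {
+ *     typeUrl: "/cosmos.feegrant.v1beta1.BasicAllowance",
+ *     value: BasicAllowance.encode(
+ *       BasicAllowance.fromPartial({ spendLimit: [{ denom: "uatom", amount: "1000" }] })
+ *     ).finish()
+ *   }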
*/ + + allowance?: Any; +} +/** + * MsgGrantAllowance adds permission for Grantee to spend up to Allowance + * of fees from the account of Granter. + */ + +export interface MsgGrantAllowanceSDKType { + granter: string; + grantee: string; + allowance?: AnySDKType; +} +/** MsgGrantAllowanceResponse defines the Msg/GrantAllowanceResponse response type. */ + +export interface MsgGrantAllowanceResponse {} +/** MsgGrantAllowanceResponse defines the Msg/GrantAllowanceResponse response type. */ + +export interface MsgGrantAllowanceResponseSDKType {} +/** MsgRevokeAllowance removes any existing Allowance from Granter to Grantee. */ + +export interface MsgRevokeAllowance { + /** granter is the address of the user granting an allowance of their funds. */ + granter: string; + /** grantee is the address of the user being granted an allowance of another user's funds. */ + + grantee: string; +} +/** MsgRevokeAllowance removes any existing Allowance from Granter to Grantee. */ + +export interface MsgRevokeAllowanceSDKType { + granter: string; + grantee: string; +} +/** MsgRevokeAllowanceResponse defines the Msg/RevokeAllowanceResponse response type. */ + +export interface MsgRevokeAllowanceResponse {} +/** MsgRevokeAllowanceResponse defines the Msg/RevokeAllowanceResponse response type. */ + +export interface MsgRevokeAllowanceResponseSDKType {} + +function createBaseMsgGrantAllowance(): MsgGrantAllowance { + return { + granter: "", + grantee: "", + allowance: undefined + }; +} + +export const MsgGrantAllowance = { + encode(message: MsgGrantAllowance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + if (message.allowance !== undefined) { + Any.encode(message.allowance, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantAllowance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgGrantAllowance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + case 3: + message.allowance = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgGrantAllowance { + const message = createBaseMsgGrantAllowance(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + message.allowance = object.allowance !== undefined && object.allowance !== null ? Any.fromPartial(object.allowance) : undefined; + return message; + } + +}; + +function createBaseMsgGrantAllowanceResponse(): MsgGrantAllowanceResponse { + return {}; +} + +export const MsgGrantAllowanceResponse = { + encode(_: MsgGrantAllowanceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgGrantAllowanceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgGrantAllowanceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgGrantAllowanceResponse { + const message = createBaseMsgGrantAllowanceResponse(); + return message; + } + +}; + +function createBaseMsgRevokeAllowance(): MsgRevokeAllowance { + return { + granter: "", + grantee: "" + }; +} + +export const MsgRevokeAllowance = { + encode(message: MsgRevokeAllowance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.granter !== "") { + writer.uint32(10).string(message.granter); + } + + if (message.grantee !== "") { + writer.uint32(18).string(message.grantee); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeAllowance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeAllowance(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.granter = reader.string(); + break; + + case 2: + message.grantee = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRevokeAllowance { + const message = createBaseMsgRevokeAllowance(); + message.granter = object.granter ?? ""; + message.grantee = object.grantee ?? ""; + return message; + } + +}; + +function createBaseMsgRevokeAllowanceResponse(): MsgRevokeAllowanceResponse { + return {}; +} + +export const MsgRevokeAllowanceResponse = { + encode(_: MsgRevokeAllowanceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeAllowanceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeAllowanceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgRevokeAllowanceResponse { + const message = createBaseMsgRevokeAllowanceResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/genutil/v1beta1/genesis.ts b/packages/codegen/src/cosmos/genutil/v1beta1/genesis.ts new file mode 100644 index 00000000..f6926d17 --- /dev/null +++ b/packages/codegen/src/cosmos/genutil/v1beta1/genesis.ts @@ -0,0 +1,58 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the raw genesis transaction in JSON. */ + +export interface GenesisState { + /** gen_txs defines the genesis transactions. */ + genTxs: Uint8Array[]; +} +/** GenesisState defines the raw genesis transaction in JSON. 
*/
+
+export interface GenesisStateSDKType {
+  gen_txs: Uint8Array[];
+}
+
+function createBaseGenesisState(): GenesisState {
+  return {
+    genTxs: []
+  };
+}
+
+export const GenesisState = {
+  encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
+    for (const v of message.genTxs) {
+      writer.uint32(10).bytes(v!);
+    }
+
+    return writer;
+  },
+
+  decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState {
+    const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseGenesisState();
+
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+
+      switch (tag >>> 3) {
+        case 1:
+          message.genTxs.push(reader.bytes());
+          break;
+
+        default:
+          reader.skipType(tag & 7);
+          break;
+      }
+    }
+
+    return message;
+  },
+
+  fromPartial(object: DeepPartial<GenesisState>): GenesisState {
+    const message = createBaseGenesisState();
+    message.genTxs = object.genTxs?.map(e => e) || [];
+    return message;
+  }
+
+};
\ No newline at end of file
diff --git a/packages/codegen/src/cosmos/gov/v1/genesis.ts b/packages/codegen/src/cosmos/gov/v1/genesis.ts
new file mode 100644
index 00000000..101340fc
--- /dev/null
+++ b/packages/codegen/src/cosmos/gov/v1/genesis.ts
@@ -0,0 +1,143 @@
+import { Deposit, DepositSDKType, Vote, VoteSDKType, Proposal, ProposalSDKType, DepositParams, DepositParamsSDKType, VotingParams, VotingParamsSDKType, TallyParams, TallyParamsSDKType } from "./gov";
+import { Long, DeepPartial } from "../../../helpers";
+import * as _m0 from "protobufjs/minimal";
+/** GenesisState defines the gov module's genesis state. */
+
+export interface GenesisState {
+  /** starting_proposal_id is the ID of the starting proposal. */
+  startingProposalId: Long;
+  /** deposits defines all the deposits present at genesis. */
+
+  deposits: Deposit[];
+  /** votes defines all the votes present at genesis. */
+
+  votes: Vote[];
+  /** proposals defines all the proposals present at genesis. */
+
+  proposals: Proposal[];
+  /** params defines all the parameters related to deposit. */
+
+  depositParams?: DepositParams;
+  /** params defines all the parameters related to voting. */
+
+  votingParams?: VotingParams;
+  /** params defines all the parameters related to tally. */
+
+  tallyParams?: TallyParams;
+}
+/** GenesisState defines the gov module's genesis state.
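 *
 * Editor's note (illustrative only, not part of the generated diff): a sketch of
 * building this genesis state with the camelCase client type; the values are
 * made up:
 *
 *   const genesis = GenesisState.fromPartial({
 *     startingProposalId: Long.fromNumber(1),
 *     proposals: [],
 *     deposits: [],
 *     votes: []
 *   });
 *   const bytes = GenesisState.encode(genesis).finish();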
*/ + +export interface GenesisStateSDKType { + starting_proposal_id: Long; + deposits: DepositSDKType[]; + votes: VoteSDKType[]; + proposals: ProposalSDKType[]; + deposit_params?: DepositParamsSDKType; + voting_params?: VotingParamsSDKType; + tally_params?: TallyParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + startingProposalId: Long.UZERO, + deposits: [], + votes: [], + proposals: [], + depositParams: undefined, + votingParams: undefined, + tallyParams: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.startingProposalId.isZero()) { + writer.uint32(8).uint64(message.startingProposalId); + } + + for (const v of message.deposits) { + Deposit.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.depositParams !== undefined) { + DepositParams.encode(message.depositParams, writer.uint32(42).fork()).ldelim(); + } + + if (message.votingParams !== undefined) { + VotingParams.encode(message.votingParams, writer.uint32(50).fork()).ldelim(); + } + + if (message.tallyParams !== undefined) { + TallyParams.encode(message.tallyParams, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.startingProposalId = (reader.uint64() as Long); + break; + + case 2: + message.deposits.push(Deposit.decode(reader, reader.uint32())); + break; + + case 3: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 4: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 5: + message.depositParams = DepositParams.decode(reader, reader.uint32()); + break; + + case 6: + message.votingParams = VotingParams.decode(reader, reader.uint32()); + break; + + case 7: + message.tallyParams = TallyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.startingProposalId = object.startingProposalId !== undefined && object.startingProposalId !== null ? Long.fromValue(object.startingProposalId) : Long.UZERO; + message.deposits = object.deposits?.map(e => Deposit.fromPartial(e)) || []; + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.depositParams = object.depositParams !== undefined && object.depositParams !== null ? DepositParams.fromPartial(object.depositParams) : undefined; + message.votingParams = object.votingParams !== undefined && object.votingParams !== null ? VotingParams.fromPartial(object.votingParams) : undefined; + message.tallyParams = object.tallyParams !== undefined && object.tallyParams !== null ? 
TallyParams.fromPartial(object.tallyParams) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/gov.ts b/packages/codegen/src/cosmos/gov/v1/gov.ts new file mode 100644 index 00000000..5d9f109d --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/gov.ts @@ -0,0 +1,903 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long, toTimestamp, fromTimestamp } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given governance proposal. */ + +export enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. */ + VOTE_OPTION_ABSTAIN = 2, + + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. */ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1, +} +export const VoteOptionSDKType = VoteOption; +export function voteOptionFromJSON(object: any): VoteOption { + switch (object) { + case 0: + case "VOTE_OPTION_UNSPECIFIED": + return VoteOption.VOTE_OPTION_UNSPECIFIED; + + case 1: + case "VOTE_OPTION_YES": + return VoteOption.VOTE_OPTION_YES; + + case 2: + case "VOTE_OPTION_ABSTAIN": + return VoteOption.VOTE_OPTION_ABSTAIN; + + case 3: + case "VOTE_OPTION_NO": + return VoteOption.VOTE_OPTION_NO; + + case 4: + case "VOTE_OPTION_NO_WITH_VETO": + return VoteOption.VOTE_OPTION_NO_WITH_VETO; + + case -1: + case "UNRECOGNIZED": + default: + return VoteOption.UNRECOGNIZED; + } +} +export function voteOptionToJSON(object: VoteOption): string { + switch (object) { + case VoteOption.VOTE_OPTION_UNSPECIFIED: + return "VOTE_OPTION_UNSPECIFIED"; + + case VoteOption.VOTE_OPTION_YES: + return "VOTE_OPTION_YES"; + + case VoteOption.VOTE_OPTION_ABSTAIN: + return "VOTE_OPTION_ABSTAIN"; + + case VoteOption.VOTE_OPTION_NO: + return "VOTE_OPTION_NO"; + + case VoteOption.VOTE_OPTION_NO_WITH_VETO: + return "VOTE_OPTION_NO_WITH_VETO"; + + case VoteOption.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ProposalStatus enumerates the valid statuses of a proposal. */ + +export enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + + /** + * PROPOSAL_STATUS_DEPOSIT_PERIOD - PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + * period. + */ + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1, + + /** + * PROPOSAL_STATUS_VOTING_PERIOD - PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + * period. + */ + PROPOSAL_STATUS_VOTING_PERIOD = 2, + + /** + * PROPOSAL_STATUS_PASSED - PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + * passed. + */ + PROPOSAL_STATUS_PASSED = 3, + + /** + * PROPOSAL_STATUS_REJECTED - PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + * been rejected. 
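   *
   * Editor's note (illustrative only, not part of the generated diff): the
   * proposalStatusFromJSON helper below accepts either the numeric value or the
   * string name, and proposalStatusToJSON maps back to the name, e.g.
   *
   *   proposalStatusFromJSON("PROPOSAL_STATUS_REJECTED") === ProposalStatus.PROPOSAL_STATUS_REJECTED
   *   proposalStatusToJSON(ProposalStatus.PROPOSAL_STATUS_PASSED) === "PROPOSAL_STATUS_PASSED"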
+ */ + PROPOSAL_STATUS_REJECTED = 4, + + /** + * PROPOSAL_STATUS_FAILED - PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + * failed. + */ + PROPOSAL_STATUS_FAILED = 5, + UNRECOGNIZED = -1, +} +export const ProposalStatusSDKType = ProposalStatus; +export function proposalStatusFromJSON(object: any): ProposalStatus { + switch (object) { + case 0: + case "PROPOSAL_STATUS_UNSPECIFIED": + return ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED; + + case 1: + case "PROPOSAL_STATUS_DEPOSIT_PERIOD": + return ProposalStatus.PROPOSAL_STATUS_DEPOSIT_PERIOD; + + case 2: + case "PROPOSAL_STATUS_VOTING_PERIOD": + return ProposalStatus.PROPOSAL_STATUS_VOTING_PERIOD; + + case 3: + case "PROPOSAL_STATUS_PASSED": + return ProposalStatus.PROPOSAL_STATUS_PASSED; + + case 4: + case "PROPOSAL_STATUS_REJECTED": + return ProposalStatus.PROPOSAL_STATUS_REJECTED; + + case 5: + case "PROPOSAL_STATUS_FAILED": + return ProposalStatus.PROPOSAL_STATUS_FAILED; + + case -1: + case "UNRECOGNIZED": + default: + return ProposalStatus.UNRECOGNIZED; + } +} +export function proposalStatusToJSON(object: ProposalStatus): string { + switch (object) { + case ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED: + return "PROPOSAL_STATUS_UNSPECIFIED"; + + case ProposalStatus.PROPOSAL_STATUS_DEPOSIT_PERIOD: + return "PROPOSAL_STATUS_DEPOSIT_PERIOD"; + + case ProposalStatus.PROPOSAL_STATUS_VOTING_PERIOD: + return "PROPOSAL_STATUS_VOTING_PERIOD"; + + case ProposalStatus.PROPOSAL_STATUS_PASSED: + return "PROPOSAL_STATUS_PASSED"; + + case ProposalStatus.PROPOSAL_STATUS_REJECTED: + return "PROPOSAL_STATUS_REJECTED"; + + case ProposalStatus.PROPOSAL_STATUS_FAILED: + return "PROPOSAL_STATUS_FAILED"; + + case ProposalStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** WeightedVoteOption defines a unit of vote for vote split. */ + +export interface WeightedVoteOption { + option: VoteOption; + weight: string; +} +/** WeightedVoteOption defines a unit of vote for vote split. */ + +export interface WeightedVoteOptionSDKType { + option: VoteOption; + weight: string; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ + +export interface Deposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ + +export interface DepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** Proposal defines the core field members of a governance proposal. */ + +export interface Proposal { + id: Long; + messages: Any[]; + status: ProposalStatus; + /** + * final_tally_result is the final tally result of the proposal. When + * querying a proposal via gRPC, this field is not populated until the + * proposal's voting period has ended. + */ + + finalTallyResult?: TallyResult; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit: Coin[]; + votingStartTime?: Date; + votingEndTime?: Date; + /** metadata is any arbitrary metadata attached to the proposal. */ + + metadata: string; +} +/** Proposal defines the core field members of a governance proposal. */ + +export interface ProposalSDKType { + id: Long; + messages: AnySDKType[]; + status: ProposalStatus; + final_tally_result?: TallyResultSDKType; + submit_time?: Date; + deposit_end_time?: Date; + total_deposit: CoinSDKType[]; + voting_start_time?: Date; + voting_end_time?: Date; + metadata: string; +} +/** TallyResult defines a standard tally for a governance proposal. 
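 *
 * Editor's note (illustrative only, not part of the generated diff): the counts
 * are plain strings; made-up values:
 *
 *   const tally = TallyResult.fromPartial({
 *     yesCount: "100",
 *     abstainCount: "5",
 *     noCount: "20",
 *     noWithVetoCount: "0"
 *   });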
*/ + +export interface TallyResult { + yesCount: string; + abstainCount: string; + noCount: string; + noWithVetoCount: string; +} +/** TallyResult defines a standard tally for a governance proposal. */ + +export interface TallyResultSDKType { + yes_count: string; + abstain_count: string; + no_count: string; + no_with_veto_count: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ + +export interface Vote { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; + /** metadata is any arbitrary metadata to attached to the vote. */ + + metadata: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ + +export interface VoteSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; + metadata: string; +} +/** DepositParams defines the params for deposits on governance proposals. */ + +export interface DepositParams { + /** Minimum deposit for a proposal to enter voting period. */ + minDeposit: Coin[]; + /** + * Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + * months. + */ + + maxDepositPeriod?: Duration; +} +/** DepositParams defines the params for deposits on governance proposals. */ + +export interface DepositParamsSDKType { + min_deposit: CoinSDKType[]; + max_deposit_period?: DurationSDKType; +} +/** VotingParams defines the params for voting on governance proposals. */ + +export interface VotingParams { + /** Length of the voting period. */ + votingPeriod?: Duration; +} +/** VotingParams defines the params for voting on governance proposals. */ + +export interface VotingParamsSDKType { + voting_period?: DurationSDKType; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ + +export interface TallyParams { + /** + * Minimum percentage of total stake needed to vote for a result to be + * considered valid. + */ + quorum: string; + /** Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. */ + + threshold: string; + /** + * Minimum value of Veto votes to Total votes ratio for proposal to be + * vetoed. Default value: 1/3. + */ + + vetoThreshold: string; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ + +export interface TallyParamsSDKType { + quorum: string; + threshold: string; + veto_threshold: string; +} + +function createBaseWeightedVoteOption(): WeightedVoteOption { + return { + option: 0, + weight: "" + }; +} + +export const WeightedVoteOption = { + encode(message: WeightedVoteOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.option !== 0) { + writer.uint32(8).int32(message.option); + } + + if (message.weight !== "") { + writer.uint32(18).string(message.weight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): WeightedVoteOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
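// --- Editor's note (illustrative only, not part of the generated diff) ---
// A weighted vote pairs a VoteOption with a decimal-string weight; by gov module
// convention the weights of one voter sum to 1 (an assumption here, not enforced
// by this codec). Minimal round-trip sketch:
//
//   const opt = WeightedVoteOption.fromPartial({
//     option: VoteOption.VOTE_OPTION_YES,
//     weight: "1.000000000000000000"
//   });
//   const restored = WeightedVoteOption.decode(WeightedVoteOption.encode(opt).finish());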
reader.len : reader.pos + length; + const message = createBaseWeightedVoteOption(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.option = (reader.int32() as any); + break; + + case 2: + message.weight = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): WeightedVoteOption { + const message = createBaseWeightedVoteOption(); + message.option = object.option ?? 0; + message.weight = object.weight ?? ""; + return message; + } + +}; + +function createBaseDeposit(): Deposit { + return { + proposalId: Long.UZERO, + depositor: "", + amount: [] + }; +} + +export const Deposit = { + encode(message: Deposit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Deposit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDeposit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Deposit { + const message = createBaseDeposit(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? 
""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseProposal(): Proposal { + return { + id: Long.UZERO, + messages: [], + status: 0, + finalTallyResult: undefined, + submitTime: undefined, + depositEndTime: undefined, + totalDeposit: [], + votingStartTime: undefined, + votingEndTime: undefined, + metadata: "" + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + + for (const v of message.messages) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.status !== 0) { + writer.uint32(24).int32(message.status); + } + + if (message.finalTallyResult !== undefined) { + TallyResult.encode(message.finalTallyResult, writer.uint32(34).fork()).ldelim(); + } + + if (message.submitTime !== undefined) { + Timestamp.encode(toTimestamp(message.submitTime), writer.uint32(42).fork()).ldelim(); + } + + if (message.depositEndTime !== undefined) { + Timestamp.encode(toTimestamp(message.depositEndTime), writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.totalDeposit) { + Coin.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.votingStartTime !== undefined) { + Timestamp.encode(toTimestamp(message.votingStartTime), writer.uint32(66).fork()).ldelim(); + } + + if (message.votingEndTime !== undefined) { + Timestamp.encode(toTimestamp(message.votingEndTime), writer.uint32(74).fork()).ldelim(); + } + + if (message.metadata !== "") { + writer.uint32(82).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = (reader.uint64() as Long); + break; + + case 2: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + + case 3: + message.status = (reader.int32() as any); + break; + + case 4: + message.finalTallyResult = TallyResult.decode(reader, reader.uint32()); + break; + + case 5: + message.submitTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 6: + message.depositEndTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 7: + message.totalDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 8: + message.votingStartTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 9: + message.votingEndTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 10: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Proposal { + const message = createBaseProposal(); + message.id = object.id !== undefined && object.id !== null ? Long.fromValue(object.id) : Long.UZERO; + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + message.status = object.status ?? 0; + message.finalTallyResult = object.finalTallyResult !== undefined && object.finalTallyResult !== null ? TallyResult.fromPartial(object.finalTallyResult) : undefined; + message.submitTime = object.submitTime ?? undefined; + message.depositEndTime = object.depositEndTime ?? 
undefined; + message.totalDeposit = object.totalDeposit?.map(e => Coin.fromPartial(e)) || []; + message.votingStartTime = object.votingStartTime ?? undefined; + message.votingEndTime = object.votingEndTime ?? undefined; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseTallyResult(): TallyResult { + return { + yesCount: "", + abstainCount: "", + noCount: "", + noWithVetoCount: "" + }; +} + +export const TallyResult = { + encode(message: TallyResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.yesCount !== "") { + writer.uint32(10).string(message.yesCount); + } + + if (message.abstainCount !== "") { + writer.uint32(18).string(message.abstainCount); + } + + if (message.noCount !== "") { + writer.uint32(26).string(message.noCount); + } + + if (message.noWithVetoCount !== "") { + writer.uint32(34).string(message.noWithVetoCount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTallyResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.yesCount = reader.string(); + break; + + case 2: + message.abstainCount = reader.string(); + break; + + case 3: + message.noCount = reader.string(); + break; + + case 4: + message.noWithVetoCount = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TallyResult { + const message = createBaseTallyResult(); + message.yesCount = object.yesCount ?? ""; + message.abstainCount = object.abstainCount ?? ""; + message.noCount = object.noCount ?? ""; + message.noWithVetoCount = object.noWithVetoCount ?? ""; + return message; + } + +}; + +function createBaseVote(): Vote { + return { + proposalId: Long.UZERO, + voter: "", + options: [], + metadata: "" + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + for (const v of message.options) { + WeightedVoteOption.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.metadata !== "") { + writer.uint32(42).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 4: + message.options.push(WeightedVoteOption.decode(reader, reader.uint32())); + break; + + case 5: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Vote { + const message = createBaseVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? 
""; + message.options = object.options?.map(e => WeightedVoteOption.fromPartial(e)) || []; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseDepositParams(): DepositParams { + return { + minDeposit: [], + maxDepositPeriod: undefined + }; +} + +export const DepositParams = { + encode(message: DepositParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.minDeposit) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.maxDepositPeriod !== undefined) { + Duration.encode(message.maxDepositPeriod, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DepositParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDepositParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.minDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.maxDepositPeriod = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DepositParams { + const message = createBaseDepositParams(); + message.minDeposit = object.minDeposit?.map(e => Coin.fromPartial(e)) || []; + message.maxDepositPeriod = object.maxDepositPeriod !== undefined && object.maxDepositPeriod !== null ? Duration.fromPartial(object.maxDepositPeriod) : undefined; + return message; + } + +}; + +function createBaseVotingParams(): VotingParams { + return { + votingPeriod: undefined + }; +} + +export const VotingParams = { + encode(message: VotingParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.votingPeriod !== undefined) { + Duration.encode(message.votingPeriod, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VotingParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVotingParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votingPeriod = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VotingParams { + const message = createBaseVotingParams(); + message.votingPeriod = object.votingPeriod !== undefined && object.votingPeriod !== null ? Duration.fromPartial(object.votingPeriod) : undefined; + return message; + } + +}; + +function createBaseTallyParams(): TallyParams { + return { + quorum: "", + threshold: "", + vetoThreshold: "" + }; +} + +export const TallyParams = { + encode(message: TallyParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.quorum !== "") { + writer.uint32(10).string(message.quorum); + } + + if (message.threshold !== "") { + writer.uint32(18).string(message.threshold); + } + + if (message.vetoThreshold !== "") { + writer.uint32(26).string(message.vetoThreshold); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TallyParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTallyParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.quorum = reader.string(); + break; + + case 2: + message.threshold = reader.string(); + break; + + case 3: + message.vetoThreshold = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TallyParams { + const message = createBaseTallyParams(); + message.quorum = object.quorum ?? ""; + message.threshold = object.threshold ?? ""; + message.vetoThreshold = object.vetoThreshold ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/query.lcd.ts b/packages/codegen/src/cosmos/gov/v1/query.lcd.ts new file mode 100644 index 00000000..c9d07eb7 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/query.lcd.ts @@ -0,0 +1,115 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsRequest, QueryProposalsResponseSDKType, QueryVoteRequest, QueryVoteResponseSDKType, QueryVotesRequest, QueryVotesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDepositRequest, QueryDepositResponseSDKType, QueryDepositsRequest, QueryDepositsResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.proposal = this.proposal.bind(this); + this.proposals = this.proposals.bind(this); + this.vote = this.vote.bind(this); + this.votes = this.votes.bind(this); + this.params = this.params.bind(this); + this.deposit = this.deposit.bind(this); + this.deposits = this.deposits.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + /* Proposal queries proposal details based on ProposalID. */ + + + async proposal(params: QueryProposalRequest): Promise { + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}`; + return await this.req.get(endpoint); + } + /* Proposals queries all proposals based on given status. */ + + + async proposals(params: QueryProposalsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.proposalStatus !== "undefined") { + options.params.proposal_status = params.proposalStatus; + } + + if (typeof params?.voter !== "undefined") { + options.params.voter = params.voter; + } + + if (typeof params?.depositor !== "undefined") { + options.params.depositor = params.depositor; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1/proposals`; + return await this.req.get(endpoint, options); + } + /* Vote queries voted information based on proposalID, voterAddr. */ + + + async vote(params: QueryVoteRequest): Promise { + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}/votes/${params.voter}`; + return await this.req.get(endpoint); + } + /* Votes queries votes of a given proposal. 
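
     Editor's note (illustrative only, not part of the generated diff): a sketch
     of calling these LCD endpoints, assuming requestClient is an LCDClient built
     elsewhere. Pagination, when provided, is turned into query parameters by
     setPaginationParams as in the methods below.

       const client = new LCDQueryClient({ requestClient });
       const votes = await client.votes({ proposalId: Long.fromNumber(1) });
       const params = await client.params({ paramsType: "voting" });
       // both results are SDK (snake_case) response types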
*/ + + + async votes(params: QueryVotesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}/votes`; + return await this.req.get(endpoint, options); + } + /* Params queries all parameters of the gov module. */ + + + async params(params: QueryParamsRequest): Promise { + const endpoint = `cosmos/gov/v1/params/${params.paramsType}`; + return await this.req.get(endpoint); + } + /* Deposit queries single deposit information based proposalID, depositAddr. */ + + + async deposit(params: QueryDepositRequest): Promise { + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}/deposits/${params.depositor}`; + return await this.req.get(endpoint); + } + /* Deposits queries all deposits of a single proposal. */ + + + async deposits(params: QueryDepositsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}/deposits`; + return await this.req.get(endpoint, options); + } + /* TallyResult queries the tally of a proposal vote. */ + + + async tallyResult(params: QueryTallyResultRequest): Promise { + const endpoint = `cosmos/gov/v1/proposals/${params.proposalId}/tally`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/query.rpc.Query.ts b/packages/codegen/src/cosmos/gov/v1/query.rpc.Query.ts new file mode 100644 index 00000000..35784235 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/query.rpc.Query.ts @@ -0,0 +1,133 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryProposalRequest, QueryProposalResponse, QueryProposalsRequest, QueryProposalsResponse, QueryVoteRequest, QueryVoteResponse, QueryVotesRequest, QueryVotesResponse, QueryParamsRequest, QueryParamsResponse, QueryDepositRequest, QueryDepositResponse, QueryDepositsRequest, QueryDepositsResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query defines the gRPC querier service for gov module */ + +export interface Query { + /** Proposal queries proposal details based on ProposalID. */ + proposal(request: QueryProposalRequest): Promise; + /** Proposals queries all proposals based on given status. */ + + proposals(request: QueryProposalsRequest): Promise; + /** Vote queries voted information based on proposalID, voterAddr. */ + + vote(request: QueryVoteRequest): Promise; + /** Votes queries votes of a given proposal. */ + + votes(request: QueryVotesRequest): Promise; + /** Params queries all parameters of the gov module. */ + + params(request: QueryParamsRequest): Promise; + /** Deposit queries single deposit information based proposalID, depositAddr. */ + + deposit(request: QueryDepositRequest): Promise; + /** Deposits queries all deposits of a single proposal. */ + + deposits(request: QueryDepositsRequest): Promise; + /** TallyResult queries the tally of a proposal vote. 
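 *
 * Editor's note (illustrative only, not part of the generated diff): a sketch of
 * using the QueryClientImpl defined below, assuming rpc satisfies the Rpc helper
 * interface (e.g. the result of createProtobufRpcClient on a connected
 * @cosmjs/stargate QueryClient):
 *
 *   const queryService = new QueryClientImpl(rpc);
 *   const res = await queryService.tallyResult({ proposalId: Long.fromNumber(1) });
 *   // res.tally holds the TallyResult, when present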
*/ + + tallyResult(request: QueryTallyResultRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.proposal = this.proposal.bind(this); + this.proposals = this.proposals.bind(this); + this.vote = this.vote.bind(this); + this.votes = this.votes.bind(this); + this.params = this.params.bind(this); + this.deposit = this.deposit.bind(this); + this.deposits = this.deposits.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + + proposal(request: QueryProposalRequest): Promise { + const data = QueryProposalRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Proposal", data); + return promise.then(data => QueryProposalResponse.decode(new _m0.Reader(data))); + } + + proposals(request: QueryProposalsRequest): Promise { + const data = QueryProposalsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Proposals", data); + return promise.then(data => QueryProposalsResponse.decode(new _m0.Reader(data))); + } + + vote(request: QueryVoteRequest): Promise { + const data = QueryVoteRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Vote", data); + return promise.then(data => QueryVoteResponse.decode(new _m0.Reader(data))); + } + + votes(request: QueryVotesRequest): Promise { + const data = QueryVotesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Votes", data); + return promise.then(data => QueryVotesResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + deposit(request: QueryDepositRequest): Promise { + const data = QueryDepositRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Deposit", data); + return promise.then(data => QueryDepositResponse.decode(new _m0.Reader(data))); + } + + deposits(request: QueryDepositsRequest): Promise { + const data = QueryDepositsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "Deposits", data); + return promise.then(data => QueryDepositsResponse.decode(new _m0.Reader(data))); + } + + tallyResult(request: QueryTallyResultRequest): Promise { + const data = QueryTallyResultRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Query", "TallyResult", data); + return promise.then(data => QueryTallyResultResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + proposal(request: QueryProposalRequest): Promise { + return queryService.proposal(request); + }, + + proposals(request: QueryProposalsRequest): Promise { + return queryService.proposals(request); + }, + + vote(request: QueryVoteRequest): Promise { + return queryService.vote(request); + }, + + votes(request: QueryVotesRequest): Promise { + return queryService.votes(request); + }, + + params(request: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + deposit(request: QueryDepositRequest): Promise { + return queryService.deposit(request); + }, + + deposits(request: QueryDepositsRequest): 
Promise { + return queryService.deposits(request); + }, + + tallyResult(request: QueryTallyResultRequest): Promise { + return queryService.tallyResult(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/query.ts b/packages/codegen/src/cosmos/gov/v1/query.ts new file mode 100644 index 00000000..b1fcdd61 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/query.ts @@ -0,0 +1,1074 @@ +import { ProposalStatus, Proposal, ProposalSDKType, Vote, VoteSDKType, VotingParams, VotingParamsSDKType, DepositParams, DepositParamsSDKType, TallyParams, TallyParamsSDKType, Deposit, DepositSDKType, TallyResult, TallyResultSDKType } from "./gov"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ + +export interface QueryProposalRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ + +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ + +export interface QueryProposalResponse { + proposal?: Proposal; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ + +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ + +export interface QueryProposalsRequest { + /** proposal_status defines the status of the proposals. */ + proposalStatus: ProposalStatus; + /** voter defines the voter address for the proposals. */ + + voter: string; + /** depositor defines the deposit addresses from the proposals. */ + + depositor: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ + +export interface QueryProposalsRequestSDKType { + proposal_status: ProposalStatus; + voter: string; + depositor: string; + pagination?: PageRequestSDKType; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ + +export interface QueryProposalsResponse { + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ + +export interface QueryProposalsResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ + +export interface QueryVoteRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** voter defines the oter address for the proposals. */ + + voter: string; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ + +export interface QueryVoteRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ + +export interface QueryVoteResponse { + /** vote defined the queried vote. */ + vote?: Vote; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. 
*/ + +export interface QueryVoteResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ + +export interface QueryVotesRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ + +export interface QueryVotesRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ + +export interface QueryVotesResponse { + /** votes defined the queried votes. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ + +export interface QueryVotesResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest { + /** + * params_type defines which parameters to query for, can be one of "voting", + * "tallying" or "deposit". + */ + paramsType: string; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType { + params_type: string; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** voting_params defines the parameters related to voting. */ + votingParams?: VotingParams; + /** deposit_params defines the parameters related to deposit. */ + + depositParams?: DepositParams; + /** tally_params defines the parameters related to tally. */ + + tallyParams?: TallyParams; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + voting_params?: VotingParamsSDKType; + deposit_params?: DepositParamsSDKType; + tally_params?: TallyParamsSDKType; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ + +export interface QueryDepositRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** depositor defines the deposit addresses from the proposals. */ + + depositor: string; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ + +export interface QueryDepositRequestSDKType { + proposal_id: Long; + depositor: string; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ + +export interface QueryDepositResponse { + /** deposit defines the requested deposit. */ + deposit?: Deposit; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ + +export interface QueryDepositResponseSDKType { + deposit?: DepositSDKType; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ + +export interface QueryDepositsRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ + +export interface QueryDepositsRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. 
*/ + +export interface QueryDepositsResponse { + deposits: Deposit[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ + +export interface QueryDepositsResponseSDKType { + deposits: DepositSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ + +export interface QueryTallyResultRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ + +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ + +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ + +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} + +function createBaseQueryProposalRequest(): QueryProposalRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryProposalRequest = { + encode(message: QueryProposalRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalRequest { + const message = createBaseQueryProposalRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryProposalResponse(): QueryProposalResponse { + return { + proposal: undefined + }; +} + +export const QueryProposalResponse = { + encode(message: QueryProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposal !== undefined) { + Proposal.encode(message.proposal, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposal = Proposal.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalResponse { + const message = createBaseQueryProposalResponse(); + message.proposal = object.proposal !== undefined && object.proposal !== null ? 
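// --- Editor's note (illustrative only, not part of the generated diff) ---
// Request/response pairs share the same codec pattern; a sketch with a made-up
// proposal id (responseBytes stands in for bytes received over the wire):
//
//   const req = QueryProposalRequest.fromPartial({ proposalId: Long.fromNumber(7) });
//   const reqBytes = QueryProposalRequest.encode(req).finish();
//   const res = QueryProposalResponse.decode(responseBytes);  // res.proposal, when present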
Proposal.fromPartial(object.proposal) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsRequest(): QueryProposalsRequest { + return { + proposalStatus: 0, + voter: "", + depositor: "", + pagination: undefined + }; +} + +export const QueryProposalsRequest = { + encode(message: QueryProposalsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposalStatus !== 0) { + writer.uint32(8).int32(message.proposalStatus); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.depositor !== "") { + writer.uint32(26).string(message.depositor); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalStatus = (reader.int32() as any); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.depositor = reader.string(); + break; + + case 4: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsRequest { + const message = createBaseQueryProposalsRequest(); + message.proposalStatus = object.proposalStatus ?? 0; + message.voter = object.voter ?? ""; + message.depositor = object.depositor ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsResponse(): QueryProposalsResponse { + return { + proposals: [], + pagination: undefined + }; +} + +export const QueryProposalsResponse = { + encode(message: QueryProposalsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsResponse { + const message = createBaseQueryProposalsResponse(); + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVoteRequest(): QueryVoteRequest { + return { + proposalId: Long.UZERO, + voter: "" + }; +} + +export const QueryVoteRequest = { + encode(message: QueryVoteRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteRequest { + const message = createBaseQueryVoteRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + return message; + } + +}; + +function createBaseQueryVoteResponse(): QueryVoteResponse { + return { + vote: undefined + }; +} + +export const QueryVoteResponse = { + encode(message: QueryVoteResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vote !== undefined) { + Vote.encode(message.vote, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vote = Vote.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteResponse { + const message = createBaseQueryVoteResponse(); + message.vote = object.vote !== undefined && object.vote !== null ? Vote.fromPartial(object.vote) : undefined; + return message; + } + +}; + +function createBaseQueryVotesRequest(): QueryVotesRequest { + return { + proposalId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryVotesRequest = { + encode(message: QueryVotesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryVotesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesRequest { + const message = createBaseQueryVotesRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVotesResponse(): QueryVotesResponse { + return { + votes: [], + pagination: undefined + }; +} + +export const QueryVotesResponse = { + encode(message: QueryVotesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVotesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesResponse { + const message = createBaseQueryVotesResponse(); + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return { + paramsType: "" + }; +} + +export const QueryParamsRequest = { + encode(message: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.paramsType !== "") { + writer.uint32(10).string(message.paramsType); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.paramsType = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + message.paramsType = object.paramsType ?? 
""; + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + votingParams: undefined, + depositParams: undefined, + tallyParams: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.votingParams !== undefined) { + VotingParams.encode(message.votingParams, writer.uint32(10).fork()).ldelim(); + } + + if (message.depositParams !== undefined) { + DepositParams.encode(message.depositParams, writer.uint32(18).fork()).ldelim(); + } + + if (message.tallyParams !== undefined) { + TallyParams.encode(message.tallyParams, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votingParams = VotingParams.decode(reader, reader.uint32()); + break; + + case 2: + message.depositParams = DepositParams.decode(reader, reader.uint32()); + break; + + case 3: + message.tallyParams = TallyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.votingParams = object.votingParams !== undefined && object.votingParams !== null ? VotingParams.fromPartial(object.votingParams) : undefined; + message.depositParams = object.depositParams !== undefined && object.depositParams !== null ? DepositParams.fromPartial(object.depositParams) : undefined; + message.tallyParams = object.tallyParams !== undefined && object.tallyParams !== null ? TallyParams.fromPartial(object.tallyParams) : undefined; + return message; + } + +}; + +function createBaseQueryDepositRequest(): QueryDepositRequest { + return { + proposalId: Long.UZERO, + depositor: "" + }; +} + +export const QueryDepositRequest = { + encode(message: QueryDepositRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositRequest { + const message = createBaseQueryDepositRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? 
""; + return message; + } + +}; + +function createBaseQueryDepositResponse(): QueryDepositResponse { + return { + deposit: undefined + }; +} + +export const QueryDepositResponse = { + encode(message: QueryDepositResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deposit !== undefined) { + Deposit.encode(message.deposit, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.deposit = Deposit.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositResponse { + const message = createBaseQueryDepositResponse(); + message.deposit = object.deposit !== undefined && object.deposit !== null ? Deposit.fromPartial(object.deposit) : undefined; + return message; + } + +}; + +function createBaseQueryDepositsRequest(): QueryDepositsRequest { + return { + proposalId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryDepositsRequest = { + encode(message: QueryDepositsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositsRequest { + const message = createBaseQueryDepositsRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDepositsResponse(): QueryDepositsResponse { + return { + deposits: [], + pagination: undefined + }; +} + +export const QueryDepositsResponse = { + encode(message: QueryDepositsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.deposits) { + Deposit.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDepositsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.deposits.push(Deposit.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositsResponse { + const message = createBaseQueryDepositsResponse(); + message.deposits = object.deposits?.map(e => Deposit.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryTallyResultRequest(): QueryTallyResultRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryTallyResultRequest = { + encode(message: QueryTallyResultRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTallyResultRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTallyResultRequest { + const message = createBaseQueryTallyResultRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryTallyResultResponse(): QueryTallyResultResponse { + return { + tally: undefined + }; +} + +export const QueryTallyResultResponse = { + encode(message: QueryTallyResultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tally !== undefined) { + TallyResult.encode(message.tally, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTallyResultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tally = TallyResult.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTallyResultResponse { + const message = createBaseQueryTallyResultResponse(); + message.tally = object.tally !== undefined && object.tally !== null ? 
TallyResult.fromPartial(object.tally) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/gov/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..2bd32ee2 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/tx.rpc.msg.ts @@ -0,0 +1,67 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSubmitProposal, MsgSubmitProposalResponse, MsgExecLegacyContent, MsgExecLegacyContentResponse, MsgVote, MsgVoteResponse, MsgVoteWeighted, MsgVoteWeightedResponse, MsgDeposit, MsgDepositResponse } from "./tx"; +/** Msg defines the gov Msg service. */ + +export interface Msg { + /** SubmitProposal defines a method to create new proposal given a content. */ + submitProposal(request: MsgSubmitProposal): Promise; + /** + * ExecLegacyContent defines a Msg to be in included in a MsgSubmitProposal + * to execute a legacy content-based proposal. + */ + + execLegacyContent(request: MsgExecLegacyContent): Promise; + /** Vote defines a method to add a vote on a specific proposal. */ + + vote(request: MsgVote): Promise; + /** VoteWeighted defines a method to add a weighted vote on a specific proposal. */ + + voteWeighted(request: MsgVoteWeighted): Promise; + /** Deposit defines a method to add deposit on a specific proposal. */ + + deposit(request: MsgDeposit): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.submitProposal = this.submitProposal.bind(this); + this.execLegacyContent = this.execLegacyContent.bind(this); + this.vote = this.vote.bind(this); + this.voteWeighted = this.voteWeighted.bind(this); + this.deposit = this.deposit.bind(this); + } + + submitProposal(request: MsgSubmitProposal): Promise { + const data = MsgSubmitProposal.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Msg", "SubmitProposal", data); + return promise.then(data => MsgSubmitProposalResponse.decode(new _m0.Reader(data))); + } + + execLegacyContent(request: MsgExecLegacyContent): Promise { + const data = MsgExecLegacyContent.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Msg", "ExecLegacyContent", data); + return promise.then(data => MsgExecLegacyContentResponse.decode(new _m0.Reader(data))); + } + + vote(request: MsgVote): Promise { + const data = MsgVote.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Msg", "Vote", data); + return promise.then(data => MsgVoteResponse.decode(new _m0.Reader(data))); + } + + voteWeighted(request: MsgVoteWeighted): Promise { + const data = MsgVoteWeighted.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Msg", "VoteWeighted", data); + return promise.then(data => MsgVoteWeightedResponse.decode(new _m0.Reader(data))); + } + + deposit(request: MsgDeposit): Promise { + const data = MsgDeposit.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1.Msg", "Deposit", data); + return promise.then(data => MsgDepositResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1/tx.ts b/packages/codegen/src/cosmos/gov/v1/tx.ts new file mode 100644 index 00000000..f5f3b511 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1/tx.ts @@ -0,0 +1,656 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { 
VoteOption, WeightedVoteOption, WeightedVoteOptionSDKType } from "./gov"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ + +export interface MsgSubmitProposal { + messages: Any[]; + initialDeposit: Coin[]; + proposer: string; + /** metadata is any arbitrary metadata attached to the proposal. */ + + metadata: string; +} +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ + +export interface MsgSubmitProposalSDKType { + messages: AnySDKType[]; + initial_deposit: CoinSDKType[]; + proposer: string; + metadata: string; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponse { + proposalId: Long; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** + * MsgExecLegacyContent is used to wrap the legacy content field into a message. + * This ensures backwards compatibility with v1beta1.MsgSubmitProposal. + */ + +export interface MsgExecLegacyContent { + /** content is the proposal's content. */ + content?: Any; + /** authority must be the gov module address. */ + + authority: string; +} +/** + * MsgExecLegacyContent is used to wrap the legacy content field into a message. + * This ensures backwards compatibility with v1beta1.MsgSubmitProposal. + */ + +export interface MsgExecLegacyContentSDKType { + content?: AnySDKType; + authority: string; +} +/** MsgExecLegacyContentResponse defines the Msg/ExecLegacyContent response type. */ + +export interface MsgExecLegacyContentResponse {} +/** MsgExecLegacyContentResponse defines the Msg/ExecLegacyContent response type. */ + +export interface MsgExecLegacyContentResponseSDKType {} +/** MsgVote defines a message to cast a vote. */ + +export interface MsgVote { + proposalId: Long; + voter: string; + option: VoteOption; + metadata: string; +} +/** MsgVote defines a message to cast a vote. */ + +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; +} +/** MsgVoteResponse defines the Msg/Vote response type. */ + +export interface MsgVoteResponse {} +/** MsgVoteResponse defines the Msg/Vote response type. */ + +export interface MsgVoteResponseSDKType {} +/** MsgVoteWeighted defines a message to cast a vote. */ + +export interface MsgVoteWeighted { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; + metadata: string; +} +/** MsgVoteWeighted defines a message to cast a vote. */ + +export interface MsgVoteWeightedSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; + metadata: string; +} +/** MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. */ + +export interface MsgVoteWeightedResponse {} +/** MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. */ + +export interface MsgVoteWeightedResponseSDKType {} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ + +export interface MsgDeposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. 
*/ + +export interface MsgDepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ + +export interface MsgDepositResponse {} +/** MsgDepositResponse defines the Msg/Deposit response type. */ + +export interface MsgDepositResponseSDKType {} + +function createBaseMsgSubmitProposal(): MsgSubmitProposal { + return { + messages: [], + initialDeposit: [], + proposer: "", + metadata: "" + }; +} + +export const MsgSubmitProposal = { + encode(message: MsgSubmitProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.messages) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.initialDeposit) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.proposer !== "") { + writer.uint32(26).string(message.proposer); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + + case 2: + message.initialDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 3: + message.proposer = reader.string(); + break; + + case 4: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposal { + const message = createBaseMsgSubmitProposal(); + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + message.initialDeposit = object.initialDeposit?.map(e => Coin.fromPartial(e)) || []; + message.proposer = object.proposer ?? ""; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseMsgSubmitProposalResponse(): MsgSubmitProposalResponse { + return { + proposalId: Long.UZERO + }; +} + +export const MsgSubmitProposalResponse = { + encode(message: MsgSubmitProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposalResponse { + const message = createBaseMsgSubmitProposalResponse(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? 
Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgExecLegacyContent(): MsgExecLegacyContent { + return { + content: undefined, + authority: "" + }; +} + +export const MsgExecLegacyContent = { + encode(message: MsgExecLegacyContent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.content !== undefined) { + Any.encode(message.content, writer.uint32(10).fork()).ldelim(); + } + + if (message.authority !== "") { + writer.uint32(18).string(message.authority); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecLegacyContent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExecLegacyContent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.content = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.authority = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExecLegacyContent { + const message = createBaseMsgExecLegacyContent(); + message.content = object.content !== undefined && object.content !== null ? Any.fromPartial(object.content) : undefined; + message.authority = object.authority ?? ""; + return message; + } + +}; + +function createBaseMsgExecLegacyContentResponse(): MsgExecLegacyContentResponse { + return {}; +} + +export const MsgExecLegacyContentResponse = { + encode(_: MsgExecLegacyContentResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecLegacyContentResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExecLegacyContentResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgExecLegacyContentResponse { + const message = createBaseMsgExecLegacyContentResponse(); + return message; + } + +}; + +function createBaseMsgVote(): MsgVote { + return { + proposalId: Long.UZERO, + voter: "", + option: 0, + metadata: "" + }; +} + +export const MsgVote = { + encode(message: MsgVote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.option !== 0) { + writer.uint32(24).int32(message.option); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.option = (reader.int32() as any); + break; + + case 4: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVote { + const message = createBaseMsgVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.option = object.option ?? 0; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseMsgVoteResponse(): MsgVoteResponse { + return {}; +} + +export const MsgVoteResponse = { + encode(_: MsgVoteResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVoteResponse { + const message = createBaseMsgVoteResponse(); + return message; + } + +}; + +function createBaseMsgVoteWeighted(): MsgVoteWeighted { + return { + proposalId: Long.UZERO, + voter: "", + options: [], + metadata: "" + }; +} + +export const MsgVoteWeighted = { + encode(message: MsgVoteWeighted, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + for (const v of message.options) { + WeightedVoteOption.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeighted { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteWeighted(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.options.push(WeightedVoteOption.decode(reader, reader.uint32())); + break; + + case 4: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVoteWeighted { + const message = createBaseMsgVoteWeighted(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.options = object.options?.map(e => WeightedVoteOption.fromPartial(e)) || []; + message.metadata = object.metadata ?? 
""; + return message; + } + +}; + +function createBaseMsgVoteWeightedResponse(): MsgVoteWeightedResponse { + return {}; +} + +export const MsgVoteWeightedResponse = { + encode(_: MsgVoteWeightedResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeightedResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteWeightedResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVoteWeightedResponse { + const message = createBaseMsgVoteWeightedResponse(); + return message; + } + +}; + +function createBaseMsgDeposit(): MsgDeposit { + return { + proposalId: Long.UZERO, + depositor: "", + amount: [] + }; +} + +export const MsgDeposit = { + encode(message: MsgDeposit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeposit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeposit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDeposit { + const message = createBaseMsgDeposit(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgDepositResponse(): MsgDepositResponse { + return {}; +} + +export const MsgDepositResponse = { + encode(_: MsgDepositResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDepositResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgDepositResponse { + const message = createBaseMsgDepositResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/genesis.ts b/packages/codegen/src/cosmos/gov/v1beta1/genesis.ts new file mode 100644 index 00000000..101340fc --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/genesis.ts @@ -0,0 +1,143 @@ +import { Deposit, DepositSDKType, Vote, VoteSDKType, Proposal, ProposalSDKType, DepositParams, DepositParamsSDKType, VotingParams, VotingParamsSDKType, TallyParams, TallyParamsSDKType } from "./gov"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the gov module's genesis state. */ + +export interface GenesisState { + /** starting_proposal_id is the ID of the starting proposal. */ + startingProposalId: Long; + /** deposits defines all the deposits present at genesis. */ + + deposits: Deposit[]; + /** votes defines all the votes present at genesis. */ + + votes: Vote[]; + /** proposals defines all the proposals present at genesis. */ + + proposals: Proposal[]; + /** params defines all the paramaters of related to deposit. */ + + depositParams?: DepositParams; + /** params defines all the paramaters of related to voting. */ + + votingParams?: VotingParams; + /** params defines all the paramaters of related to tally. */ + + tallyParams?: TallyParams; +} +/** GenesisState defines the gov module's genesis state. */ + +export interface GenesisStateSDKType { + starting_proposal_id: Long; + deposits: DepositSDKType[]; + votes: VoteSDKType[]; + proposals: ProposalSDKType[]; + deposit_params?: DepositParamsSDKType; + voting_params?: VotingParamsSDKType; + tally_params?: TallyParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + startingProposalId: Long.UZERO, + deposits: [], + votes: [], + proposals: [], + depositParams: undefined, + votingParams: undefined, + tallyParams: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.startingProposalId.isZero()) { + writer.uint32(8).uint64(message.startingProposalId); + } + + for (const v of message.deposits) { + Deposit.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.depositParams !== undefined) { + DepositParams.encode(message.depositParams, writer.uint32(42).fork()).ldelim(); + } + + if (message.votingParams !== undefined) { + VotingParams.encode(message.votingParams, writer.uint32(50).fork()).ldelim(); + } + + if (message.tallyParams !== undefined) { + TallyParams.encode(message.tallyParams, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.startingProposalId = (reader.uint64() as Long); + break; + + case 2: + message.deposits.push(Deposit.decode(reader, reader.uint32())); + break; + + case 3: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 4: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 5: + message.depositParams = DepositParams.decode(reader, reader.uint32()); + break; + + case 6: + message.votingParams = VotingParams.decode(reader, reader.uint32()); + break; + + case 7: + message.tallyParams = TallyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.startingProposalId = object.startingProposalId !== undefined && object.startingProposalId !== null ? Long.fromValue(object.startingProposalId) : Long.UZERO; + message.deposits = object.deposits?.map(e => Deposit.fromPartial(e)) || []; + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.depositParams = object.depositParams !== undefined && object.depositParams !== null ? DepositParams.fromPartial(object.depositParams) : undefined; + message.votingParams = object.votingParams !== undefined && object.votingParams !== null ? VotingParams.fromPartial(object.votingParams) : undefined; + message.tallyParams = object.tallyParams !== undefined && object.tallyParams !== null ? TallyParams.fromPartial(object.tallyParams) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/gov.ts b/packages/codegen/src/cosmos/gov/v1beta1/gov.ts new file mode 100644 index 00000000..ef121c80 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/gov.ts @@ -0,0 +1,980 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long, toTimestamp, fromTimestamp } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given governance proposal. */ + +export enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. */ + VOTE_OPTION_ABSTAIN = 2, + + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. 
*/ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1, +} +export const VoteOptionSDKType = VoteOption; +export function voteOptionFromJSON(object: any): VoteOption { + switch (object) { + case 0: + case "VOTE_OPTION_UNSPECIFIED": + return VoteOption.VOTE_OPTION_UNSPECIFIED; + + case 1: + case "VOTE_OPTION_YES": + return VoteOption.VOTE_OPTION_YES; + + case 2: + case "VOTE_OPTION_ABSTAIN": + return VoteOption.VOTE_OPTION_ABSTAIN; + + case 3: + case "VOTE_OPTION_NO": + return VoteOption.VOTE_OPTION_NO; + + case 4: + case "VOTE_OPTION_NO_WITH_VETO": + return VoteOption.VOTE_OPTION_NO_WITH_VETO; + + case -1: + case "UNRECOGNIZED": + default: + return VoteOption.UNRECOGNIZED; + } +} +export function voteOptionToJSON(object: VoteOption): string { + switch (object) { + case VoteOption.VOTE_OPTION_UNSPECIFIED: + return "VOTE_OPTION_UNSPECIFIED"; + + case VoteOption.VOTE_OPTION_YES: + return "VOTE_OPTION_YES"; + + case VoteOption.VOTE_OPTION_ABSTAIN: + return "VOTE_OPTION_ABSTAIN"; + + case VoteOption.VOTE_OPTION_NO: + return "VOTE_OPTION_NO"; + + case VoteOption.VOTE_OPTION_NO_WITH_VETO: + return "VOTE_OPTION_NO_WITH_VETO"; + + case VoteOption.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ProposalStatus enumerates the valid statuses of a proposal. */ + +export enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - PROPOSAL_STATUS_UNSPECIFIED defines the default propopsal status. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + + /** + * PROPOSAL_STATUS_DEPOSIT_PERIOD - PROPOSAL_STATUS_DEPOSIT_PERIOD defines a proposal status during the deposit + * period. + */ + PROPOSAL_STATUS_DEPOSIT_PERIOD = 1, + + /** + * PROPOSAL_STATUS_VOTING_PERIOD - PROPOSAL_STATUS_VOTING_PERIOD defines a proposal status during the voting + * period. + */ + PROPOSAL_STATUS_VOTING_PERIOD = 2, + + /** + * PROPOSAL_STATUS_PASSED - PROPOSAL_STATUS_PASSED defines a proposal status of a proposal that has + * passed. + */ + PROPOSAL_STATUS_PASSED = 3, + + /** + * PROPOSAL_STATUS_REJECTED - PROPOSAL_STATUS_REJECTED defines a proposal status of a proposal that has + * been rejected. + */ + PROPOSAL_STATUS_REJECTED = 4, + + /** + * PROPOSAL_STATUS_FAILED - PROPOSAL_STATUS_FAILED defines a proposal status of a proposal that has + * failed. 
+ */ + PROPOSAL_STATUS_FAILED = 5, + UNRECOGNIZED = -1, +} +export const ProposalStatusSDKType = ProposalStatus; +export function proposalStatusFromJSON(object: any): ProposalStatus { + switch (object) { + case 0: + case "PROPOSAL_STATUS_UNSPECIFIED": + return ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED; + + case 1: + case "PROPOSAL_STATUS_DEPOSIT_PERIOD": + return ProposalStatus.PROPOSAL_STATUS_DEPOSIT_PERIOD; + + case 2: + case "PROPOSAL_STATUS_VOTING_PERIOD": + return ProposalStatus.PROPOSAL_STATUS_VOTING_PERIOD; + + case 3: + case "PROPOSAL_STATUS_PASSED": + return ProposalStatus.PROPOSAL_STATUS_PASSED; + + case 4: + case "PROPOSAL_STATUS_REJECTED": + return ProposalStatus.PROPOSAL_STATUS_REJECTED; + + case 5: + case "PROPOSAL_STATUS_FAILED": + return ProposalStatus.PROPOSAL_STATUS_FAILED; + + case -1: + case "UNRECOGNIZED": + default: + return ProposalStatus.UNRECOGNIZED; + } +} +export function proposalStatusToJSON(object: ProposalStatus): string { + switch (object) { + case ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED: + return "PROPOSAL_STATUS_UNSPECIFIED"; + + case ProposalStatus.PROPOSAL_STATUS_DEPOSIT_PERIOD: + return "PROPOSAL_STATUS_DEPOSIT_PERIOD"; + + case ProposalStatus.PROPOSAL_STATUS_VOTING_PERIOD: + return "PROPOSAL_STATUS_VOTING_PERIOD"; + + case ProposalStatus.PROPOSAL_STATUS_PASSED: + return "PROPOSAL_STATUS_PASSED"; + + case ProposalStatus.PROPOSAL_STATUS_REJECTED: + return "PROPOSAL_STATUS_REJECTED"; + + case ProposalStatus.PROPOSAL_STATUS_FAILED: + return "PROPOSAL_STATUS_FAILED"; + + case ProposalStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * WeightedVoteOption defines a unit of vote for vote split. + * + * Since: cosmos-sdk 0.43 + */ + +export interface WeightedVoteOption { + option: VoteOption; + weight: string; +} +/** + * WeightedVoteOption defines a unit of vote for vote split. + * + * Since: cosmos-sdk 0.43 + */ + +export interface WeightedVoteOptionSDKType { + option: VoteOption; + weight: string; +} +/** + * TextProposal defines a standard text proposal whose changes need to be + * manually updated in case of approval. + */ + +export interface TextProposal { + title: string; + description: string; +} +/** + * TextProposal defines a standard text proposal whose changes need to be + * manually updated in case of approval. + */ + +export interface TextProposalSDKType { + title: string; + description: string; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ + +export interface Deposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** + * Deposit defines an amount deposited by an account address to an active + * proposal. + */ + +export interface DepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** Proposal defines the core field members of a governance proposal. */ + +export interface Proposal { + proposalId: Long; + content?: Any; + status: ProposalStatus; + /** + * final_tally_result is the final tally result of the proposal. When + * querying a proposal via gRPC, this field is not populated until the + * proposal's voting period has ended. + */ + + finalTallyResult?: TallyResult; + submitTime?: Date; + depositEndTime?: Date; + totalDeposit: Coin[]; + votingStartTime?: Date; + votingEndTime?: Date; +} +/** Proposal defines the core field members of a governance proposal. 
*/ + +export interface ProposalSDKType { + proposal_id: Long; + content?: AnySDKType; + status: ProposalStatus; + final_tally_result?: TallyResultSDKType; + submit_time?: Date; + deposit_end_time?: Date; + total_deposit: CoinSDKType[]; + voting_start_time?: Date; + voting_end_time?: Date; +} +/** TallyResult defines a standard tally for a governance proposal. */ + +export interface TallyResult { + yes: string; + abstain: string; + no: string; + noWithVeto: string; +} +/** TallyResult defines a standard tally for a governance proposal. */ + +export interface TallyResultSDKType { + yes: string; + abstain: string; + no: string; + no_with_veto: string; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ + +export interface Vote { + proposalId: Long; + voter: string; + /** + * Deprecated: Prefer to use `options` instead. This field is set in queries + * if and only if `len(options) == 1` and that option has weight 1. In all + * other cases, this field will default to VOTE_OPTION_UNSPECIFIED. + */ + + /** @deprecated */ + + option: VoteOption; + /** Since: cosmos-sdk 0.43 */ + + options: WeightedVoteOption[]; +} +/** + * Vote defines a vote on a governance proposal. + * A Vote consists of a proposal ID, the voter, and the vote option. + */ + +export interface VoteSDKType { + proposal_id: Long; + voter: string; + /** @deprecated */ + + option: VoteOption; + options: WeightedVoteOptionSDKType[]; +} +/** DepositParams defines the params for deposits on governance proposals. */ + +export interface DepositParams { + /** Minimum deposit for a proposal to enter voting period. */ + minDeposit: Coin[]; + /** + * Maximum period for Atom holders to deposit on a proposal. Initial value: 2 + * months. + */ + + maxDepositPeriod?: Duration; +} +/** DepositParams defines the params for deposits on governance proposals. */ + +export interface DepositParamsSDKType { + min_deposit: CoinSDKType[]; + max_deposit_period?: DurationSDKType; +} +/** VotingParams defines the params for voting on governance proposals. */ + +export interface VotingParams { + /** Length of the voting period. */ + votingPeriod?: Duration; +} +/** VotingParams defines the params for voting on governance proposals. */ + +export interface VotingParamsSDKType { + voting_period?: DurationSDKType; +} +/** TallyParams defines the params for tallying votes on governance proposals. */ + +export interface TallyParams { + /** + * Minimum percentage of total stake needed to vote for a result to be + * considered valid. + */ + quorum: Uint8Array; + /** Minimum proportion of Yes votes for proposal to pass. Default value: 0.5. */ + + threshold: Uint8Array; + /** + * Minimum value of Veto votes to Total votes ratio for proposal to be + * vetoed. Default value: 1/3. + */ + + vetoThreshold: Uint8Array; +} +/** TallyParams defines the params for tallying votes on governance proposals. 
*/ + +export interface TallyParamsSDKType { + quorum: Uint8Array; + threshold: Uint8Array; + veto_threshold: Uint8Array; +} + +function createBaseWeightedVoteOption(): WeightedVoteOption { + return { + option: 0, + weight: "" + }; +} + +export const WeightedVoteOption = { + encode(message: WeightedVoteOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.option !== 0) { + writer.uint32(8).int32(message.option); + } + + if (message.weight !== "") { + writer.uint32(18).string(message.weight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): WeightedVoteOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseWeightedVoteOption(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.option = (reader.int32() as any); + break; + + case 2: + message.weight = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): WeightedVoteOption { + const message = createBaseWeightedVoteOption(); + message.option = object.option ?? 0; + message.weight = object.weight ?? ""; + return message; + } + +}; + +function createBaseTextProposal(): TextProposal { + return { + title: "", + description: "" + }; +} + +export const TextProposal = { + encode(message: TextProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TextProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTextProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TextProposal { + const message = createBaseTextProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + return message; + } + +}; + +function createBaseDeposit(): Deposit { + return { + proposalId: Long.UZERO, + depositor: "", + amount: [] + }; +} + +export const Deposit = { + encode(message: Deposit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Deposit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDeposit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Deposit { + const message = createBaseDeposit(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseProposal(): Proposal { + return { + proposalId: Long.UZERO, + content: undefined, + status: 0, + finalTallyResult: undefined, + submitTime: undefined, + depositEndTime: undefined, + totalDeposit: [], + votingStartTime: undefined, + votingEndTime: undefined + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.content !== undefined) { + Any.encode(message.content, writer.uint32(18).fork()).ldelim(); + } + + if (message.status !== 0) { + writer.uint32(24).int32(message.status); + } + + if (message.finalTallyResult !== undefined) { + TallyResult.encode(message.finalTallyResult, writer.uint32(34).fork()).ldelim(); + } + + if (message.submitTime !== undefined) { + Timestamp.encode(toTimestamp(message.submitTime), writer.uint32(42).fork()).ldelim(); + } + + if (message.depositEndTime !== undefined) { + Timestamp.encode(toTimestamp(message.depositEndTime), writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.totalDeposit) { + Coin.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.votingStartTime !== undefined) { + Timestamp.encode(toTimestamp(message.votingStartTime), writer.uint32(66).fork()).ldelim(); + } + + if (message.votingEndTime !== undefined) { + Timestamp.encode(toTimestamp(message.votingEndTime), writer.uint32(74).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.content = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.status = (reader.int32() as any); + break; + + case 4: + message.finalTallyResult = TallyResult.decode(reader, reader.uint32()); + break; + + case 5: + message.submitTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 6: + message.depositEndTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 7: + message.totalDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 8: + message.votingStartTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 9: + message.votingEndTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Proposal { + const message = createBaseProposal(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.content = object.content !== undefined && object.content !== null ? Any.fromPartial(object.content) : undefined; + message.status = object.status ?? 0; + message.finalTallyResult = object.finalTallyResult !== undefined && object.finalTallyResult !== null ? TallyResult.fromPartial(object.finalTallyResult) : undefined; + message.submitTime = object.submitTime ?? undefined; + message.depositEndTime = object.depositEndTime ?? undefined; + message.totalDeposit = object.totalDeposit?.map(e => Coin.fromPartial(e)) || []; + message.votingStartTime = object.votingStartTime ?? undefined; + message.votingEndTime = object.votingEndTime ?? undefined; + return message; + } + +}; + +function createBaseTallyResult(): TallyResult { + return { + yes: "", + abstain: "", + no: "", + noWithVeto: "" + }; +} + +export const TallyResult = { + encode(message: TallyResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.yes !== "") { + writer.uint32(10).string(message.yes); + } + + if (message.abstain !== "") { + writer.uint32(18).string(message.abstain); + } + + if (message.no !== "") { + writer.uint32(26).string(message.no); + } + + if (message.noWithVeto !== "") { + writer.uint32(34).string(message.noWithVeto); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTallyResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.yes = reader.string(); + break; + + case 2: + message.abstain = reader.string(); + break; + + case 3: + message.no = reader.string(); + break; + + case 4: + message.noWithVeto = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TallyResult { + const message = createBaseTallyResult(); + message.yes = object.yes ?? ""; + message.abstain = object.abstain ?? ""; + message.no = object.no ?? ""; + message.noWithVeto = object.noWithVeto ?? 
""; + return message; + } + +}; + +function createBaseVote(): Vote { + return { + proposalId: Long.UZERO, + voter: "", + option: 0, + options: [] + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.option !== 0) { + writer.uint32(24).int32(message.option); + } + + for (const v of message.options) { + WeightedVoteOption.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.option = (reader.int32() as any); + break; + + case 4: + message.options.push(WeightedVoteOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Vote { + const message = createBaseVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.option = object.option ?? 0; + message.options = object.options?.map(e => WeightedVoteOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseDepositParams(): DepositParams { + return { + minDeposit: [], + maxDepositPeriod: undefined + }; +} + +export const DepositParams = { + encode(message: DepositParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.minDeposit) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.maxDepositPeriod !== undefined) { + Duration.encode(message.maxDepositPeriod, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DepositParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDepositParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.minDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.maxDepositPeriod = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DepositParams { + const message = createBaseDepositParams(); + message.minDeposit = object.minDeposit?.map(e => Coin.fromPartial(e)) || []; + message.maxDepositPeriod = object.maxDepositPeriod !== undefined && object.maxDepositPeriod !== null ? 
Duration.fromPartial(object.maxDepositPeriod) : undefined; + return message; + } + +}; + +function createBaseVotingParams(): VotingParams { + return { + votingPeriod: undefined + }; +} + +export const VotingParams = { + encode(message: VotingParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.votingPeriod !== undefined) { + Duration.encode(message.votingPeriod, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VotingParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVotingParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votingPeriod = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VotingParams { + const message = createBaseVotingParams(); + message.votingPeriod = object.votingPeriod !== undefined && object.votingPeriod !== null ? Duration.fromPartial(object.votingPeriod) : undefined; + return message; + } + +}; + +function createBaseTallyParams(): TallyParams { + return { + quorum: new Uint8Array(), + threshold: new Uint8Array(), + vetoThreshold: new Uint8Array() + }; +} + +export const TallyParams = { + encode(message: TallyParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.quorum.length !== 0) { + writer.uint32(10).bytes(message.quorum); + } + + if (message.threshold.length !== 0) { + writer.uint32(18).bytes(message.threshold); + } + + if (message.vetoThreshold.length !== 0) { + writer.uint32(26).bytes(message.vetoThreshold); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TallyParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTallyParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.quorum = reader.bytes(); + break; + + case 2: + message.threshold = reader.bytes(); + break; + + case 3: + message.vetoThreshold = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TallyParams { + const message = createBaseTallyParams(); + message.quorum = object.quorum ?? new Uint8Array(); + message.threshold = object.threshold ?? new Uint8Array(); + message.vetoThreshold = object.vetoThreshold ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/gov/v1beta1/query.lcd.ts new file mode 100644 index 00000000..1210dbdd --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/query.lcd.ts @@ -0,0 +1,115 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsRequest, QueryProposalsResponseSDKType, QueryVoteRequest, QueryVoteResponseSDKType, QueryVotesRequest, QueryVotesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryDepositRequest, QueryDepositResponseSDKType, QueryDepositsRequest, QueryDepositsResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.proposal = this.proposal.bind(this); + this.proposals = this.proposals.bind(this); + this.vote = this.vote.bind(this); + this.votes = this.votes.bind(this); + this.params = this.params.bind(this); + this.deposit = this.deposit.bind(this); + this.deposits = this.deposits.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + /* Proposal queries proposal details based on ProposalID. */ + + + async proposal(params: QueryProposalRequest): Promise { + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}`; + return await this.req.get(endpoint); + } + /* Proposals queries all proposals based on given status. */ + + + async proposals(params: QueryProposalsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.proposalStatus !== "undefined") { + options.params.proposal_status = params.proposalStatus; + } + + if (typeof params?.voter !== "undefined") { + options.params.voter = params.voter; + } + + if (typeof params?.depositor !== "undefined") { + options.params.depositor = params.depositor; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1beta1/proposals`; + return await this.req.get(endpoint, options); + } + /* Vote queries voted information based on proposalID, voterAddr. */ + + + async vote(params: QueryVoteRequest): Promise { + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}/votes/${params.voter}`; + return await this.req.get(endpoint); + } + /* Votes queries votes of a given proposal. */ + + + async votes(params: QueryVotesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}/votes`; + return await this.req.get(endpoint, options); + } + /* Params queries all parameters of the gov module. */ + + + async params(params: QueryParamsRequest): Promise { + const endpoint = `cosmos/gov/v1beta1/params/${params.paramsType}`; + return await this.req.get(endpoint); + } + /* Deposit queries single deposit information based proposalID, depositAddr. */ + + + async deposit(params: QueryDepositRequest): Promise { + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}/deposits/${params.depositor}`; + return await this.req.get(endpoint); + } + /* Deposits queries all deposits of a single proposal. 
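+     Pagination is optional: when params.pagination is set, it is serialized into
+     query-string parameters via setPaginationParams before the request is sent.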
*/ + + + async deposits(params: QueryDepositsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}/deposits`; + return await this.req.get(endpoint, options); + } + /* TallyResult queries the tally of a proposal vote. */ + + + async tallyResult(params: QueryTallyResultRequest): Promise { + const endpoint = `cosmos/gov/v1beta1/proposals/${params.proposalId}/tally`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/gov/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..246d7da9 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/query.rpc.Query.ts @@ -0,0 +1,133 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryProposalRequest, QueryProposalResponse, QueryProposalsRequest, QueryProposalsResponse, QueryVoteRequest, QueryVoteResponse, QueryVotesRequest, QueryVotesResponse, QueryParamsRequest, QueryParamsResponse, QueryDepositRequest, QueryDepositResponse, QueryDepositsRequest, QueryDepositsResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query defines the gRPC querier service for gov module */ + +export interface Query { + /** Proposal queries proposal details based on ProposalID. */ + proposal(request: QueryProposalRequest): Promise; + /** Proposals queries all proposals based on given status. */ + + proposals(request: QueryProposalsRequest): Promise; + /** Vote queries voted information based on proposalID, voterAddr. */ + + vote(request: QueryVoteRequest): Promise; + /** Votes queries votes of a given proposal. */ + + votes(request: QueryVotesRequest): Promise; + /** Params queries all parameters of the gov module. */ + + params(request: QueryParamsRequest): Promise; + /** Deposit queries single deposit information based proposalID, depositAddr. */ + + deposit(request: QueryDepositRequest): Promise; + /** Deposits queries all deposits of a single proposal. */ + + deposits(request: QueryDepositsRequest): Promise; + /** TallyResult queries the tally of a proposal vote. 
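+   * The response wraps a TallyResult with the current yes, abstain, no and no_with_veto counts.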
*/ + + tallyResult(request: QueryTallyResultRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.proposal = this.proposal.bind(this); + this.proposals = this.proposals.bind(this); + this.vote = this.vote.bind(this); + this.votes = this.votes.bind(this); + this.params = this.params.bind(this); + this.deposit = this.deposit.bind(this); + this.deposits = this.deposits.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + + proposal(request: QueryProposalRequest): Promise { + const data = QueryProposalRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Proposal", data); + return promise.then(data => QueryProposalResponse.decode(new _m0.Reader(data))); + } + + proposals(request: QueryProposalsRequest): Promise { + const data = QueryProposalsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Proposals", data); + return promise.then(data => QueryProposalsResponse.decode(new _m0.Reader(data))); + } + + vote(request: QueryVoteRequest): Promise { + const data = QueryVoteRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Vote", data); + return promise.then(data => QueryVoteResponse.decode(new _m0.Reader(data))); + } + + votes(request: QueryVotesRequest): Promise { + const data = QueryVotesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Votes", data); + return promise.then(data => QueryVotesResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + deposit(request: QueryDepositRequest): Promise { + const data = QueryDepositRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Deposit", data); + return promise.then(data => QueryDepositResponse.decode(new _m0.Reader(data))); + } + + deposits(request: QueryDepositsRequest): Promise { + const data = QueryDepositsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "Deposits", data); + return promise.then(data => QueryDepositsResponse.decode(new _m0.Reader(data))); + } + + tallyResult(request: QueryTallyResultRequest): Promise { + const data = QueryTallyResultRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Query", "TallyResult", data); + return promise.then(data => QueryTallyResultResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + proposal(request: QueryProposalRequest): Promise { + return queryService.proposal(request); + }, + + proposals(request: QueryProposalsRequest): Promise { + return queryService.proposals(request); + }, + + vote(request: QueryVoteRequest): Promise { + return queryService.vote(request); + }, + + votes(request: QueryVotesRequest): Promise { + return queryService.votes(request); + }, + + params(request: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + deposit(request: QueryDepositRequest): Promise { + return queryService.deposit(request); + }, + + 
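+    // Illustrative usage sketch (assumptions: Tendermint34Client comes from
+    // "@cosmjs/tendermint-rpc", Long from the generated helpers, and the endpoint URL
+    // is a placeholder):
+    //
+    //   const tmClient = await Tendermint34Client.connect("http://localhost:26657");
+    //   const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
+    //   const { proposal } = await client.proposal({ proposalId: Long.fromNumber(1) });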
deposits(request: QueryDepositsRequest): Promise { + return queryService.deposits(request); + }, + + tallyResult(request: QueryTallyResultRequest): Promise { + return queryService.tallyResult(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/query.ts b/packages/codegen/src/cosmos/gov/v1beta1/query.ts new file mode 100644 index 00000000..b1fcdd61 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/query.ts @@ -0,0 +1,1074 @@ +import { ProposalStatus, Proposal, ProposalSDKType, Vote, VoteSDKType, VotingParams, VotingParamsSDKType, DepositParams, DepositParamsSDKType, TallyParams, TallyParamsSDKType, Deposit, DepositSDKType, TallyResult, TallyResultSDKType } from "./gov"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ + +export interface QueryProposalRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the request type for the Query/Proposal RPC method. */ + +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ + +export interface QueryProposalResponse { + proposal?: Proposal; +} +/** QueryProposalResponse is the response type for the Query/Proposal RPC method. */ + +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ + +export interface QueryProposalsRequest { + /** proposal_status defines the status of the proposals. */ + proposalStatus: ProposalStatus; + /** voter defines the voter address for the proposals. */ + + voter: string; + /** depositor defines the deposit addresses from the proposals. */ + + depositor: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryProposalsRequest is the request type for the Query/Proposals RPC method. */ + +export interface QueryProposalsRequestSDKType { + proposal_status: ProposalStatus; + voter: string; + depositor: string; + pagination?: PageRequestSDKType; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ + +export interface QueryProposalsResponse { + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryProposalsResponse is the response type for the Query/Proposals RPC + * method. + */ + +export interface QueryProposalsResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ + +export interface QueryVoteRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** voter defines the oter address for the proposals. */ + + voter: string; +} +/** QueryVoteRequest is the request type for the Query/Vote RPC method. */ + +export interface QueryVoteRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ + +export interface QueryVoteResponse { + /** vote defined the queried vote. 
*/ + vote?: Vote; +} +/** QueryVoteResponse is the response type for the Query/Vote RPC method. */ + +export interface QueryVoteResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ + +export interface QueryVotesRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryVotesRequest is the request type for the Query/Votes RPC method. */ + +export interface QueryVotesRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ + +export interface QueryVotesResponse { + /** votes defined the queried votes. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryVotesResponse is the response type for the Query/Votes RPC method. */ + +export interface QueryVotesResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest { + /** + * params_type defines which parameters to query for, can be one of "voting", + * "tallying" or "deposit". + */ + paramsType: string; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType { + params_type: string; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** voting_params defines the parameters related to voting. */ + votingParams?: VotingParams; + /** deposit_params defines the parameters related to deposit. */ + + depositParams?: DepositParams; + /** tally_params defines the parameters related to tally. */ + + tallyParams?: TallyParams; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + voting_params?: VotingParamsSDKType; + deposit_params?: DepositParamsSDKType; + tally_params?: TallyParamsSDKType; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ + +export interface QueryDepositRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** depositor defines the deposit addresses from the proposals. */ + + depositor: string; +} +/** QueryDepositRequest is the request type for the Query/Deposit RPC method. */ + +export interface QueryDepositRequestSDKType { + proposal_id: Long; + depositor: string; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ + +export interface QueryDepositResponse { + /** deposit defines the requested deposit. */ + deposit?: Deposit; +} +/** QueryDepositResponse is the response type for the Query/Deposit RPC method. */ + +export interface QueryDepositResponseSDKType { + deposit?: DepositSDKType; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. */ + +export interface QueryDepositsRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryDepositsRequest is the request type for the Query/Deposits RPC method. 
*/ + +export interface QueryDepositsRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ + +export interface QueryDepositsResponse { + deposits: Deposit[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryDepositsResponse is the response type for the Query/Deposits RPC method. */ + +export interface QueryDepositsResponseSDKType { + deposits: DepositSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ + +export interface QueryTallyResultRequest { + /** proposal_id defines the unique id of the proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the request type for the Query/Tally RPC method. */ + +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ + +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the response type for the Query/Tally RPC method. */ + +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} + +function createBaseQueryProposalRequest(): QueryProposalRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryProposalRequest = { + encode(message: QueryProposalRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalRequest { + const message = createBaseQueryProposalRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryProposalResponse(): QueryProposalResponse { + return { + proposal: undefined + }; +} + +export const QueryProposalResponse = { + encode(message: QueryProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposal !== undefined) { + Proposal.encode(message.proposal, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposal = Proposal.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalResponse { + const message = createBaseQueryProposalResponse(); + message.proposal = object.proposal !== undefined && object.proposal !== null ? Proposal.fromPartial(object.proposal) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsRequest(): QueryProposalsRequest { + return { + proposalStatus: 0, + voter: "", + depositor: "", + pagination: undefined + }; +} + +export const QueryProposalsRequest = { + encode(message: QueryProposalsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposalStatus !== 0) { + writer.uint32(8).int32(message.proposalStatus); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.depositor !== "") { + writer.uint32(26).string(message.depositor); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalStatus = (reader.int32() as any); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.depositor = reader.string(); + break; + + case 4: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsRequest { + const message = createBaseQueryProposalsRequest(); + message.proposalStatus = object.proposalStatus ?? 0; + message.voter = object.voter ?? ""; + message.depositor = object.depositor ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsResponse(): QueryProposalsResponse { + return { + proposals: [], + pagination: undefined + }; +} + +export const QueryProposalsResponse = { + encode(message: QueryProposalsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryProposalsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsResponse { + const message = createBaseQueryProposalsResponse(); + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVoteRequest(): QueryVoteRequest { + return { + proposalId: Long.UZERO, + voter: "" + }; +} + +export const QueryVoteRequest = { + encode(message: QueryVoteRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteRequest { + const message = createBaseQueryVoteRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + return message; + } + +}; + +function createBaseQueryVoteResponse(): QueryVoteResponse { + return { + vote: undefined + }; +} + +export const QueryVoteResponse = { + encode(message: QueryVoteResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vote !== undefined) { + Vote.encode(message.vote, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vote = Vote.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteResponse { + const message = createBaseQueryVoteResponse(); + message.vote = object.vote !== undefined && object.vote !== null ? 
Vote.fromPartial(object.vote) : undefined; + return message; + } + +}; + +function createBaseQueryVotesRequest(): QueryVotesRequest { + return { + proposalId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryVotesRequest = { + encode(message: QueryVotesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVotesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesRequest { + const message = createBaseQueryVotesRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVotesResponse(): QueryVotesResponse { + return { + votes: [], + pagination: undefined + }; +} + +export const QueryVotesResponse = { + encode(message: QueryVotesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVotesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesResponse { + const message = createBaseQueryVotesResponse(); + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return { + paramsType: "" + }; +} + +export const QueryParamsRequest = { + encode(message: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.paramsType !== "") { + writer.uint32(10).string(message.paramsType); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.paramsType = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + message.paramsType = object.paramsType ?? ""; + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + votingParams: undefined, + depositParams: undefined, + tallyParams: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.votingParams !== undefined) { + VotingParams.encode(message.votingParams, writer.uint32(10).fork()).ldelim(); + } + + if (message.depositParams !== undefined) { + DepositParams.encode(message.depositParams, writer.uint32(18).fork()).ldelim(); + } + + if (message.tallyParams !== undefined) { + TallyParams.encode(message.tallyParams, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votingParams = VotingParams.decode(reader, reader.uint32()); + break; + + case 2: + message.depositParams = DepositParams.decode(reader, reader.uint32()); + break; + + case 3: + message.tallyParams = TallyParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.votingParams = object.votingParams !== undefined && object.votingParams !== null ? VotingParams.fromPartial(object.votingParams) : undefined; + message.depositParams = object.depositParams !== undefined && object.depositParams !== null ? DepositParams.fromPartial(object.depositParams) : undefined; + message.tallyParams = object.tallyParams !== undefined && object.tallyParams !== null ? TallyParams.fromPartial(object.tallyParams) : undefined; + return message; + } + +}; + +function createBaseQueryDepositRequest(): QueryDepositRequest { + return { + proposalId: Long.UZERO, + depositor: "" + }; +} + +export const QueryDepositRequest = { + encode(message: QueryDepositRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDepositRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositRequest { + const message = createBaseQueryDepositRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? ""; + return message; + } + +}; + +function createBaseQueryDepositResponse(): QueryDepositResponse { + return { + deposit: undefined + }; +} + +export const QueryDepositResponse = { + encode(message: QueryDepositResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deposit !== undefined) { + Deposit.encode(message.deposit, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.deposit = Deposit.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositResponse { + const message = createBaseQueryDepositResponse(); + message.deposit = object.deposit !== undefined && object.deposit !== null ? Deposit.fromPartial(object.deposit) : undefined; + return message; + } + +}; + +function createBaseQueryDepositsRequest(): QueryDepositsRequest { + return { + proposalId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryDepositsRequest = { + encode(message: QueryDepositsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositsRequest { + const message = createBaseQueryDepositsRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDepositsResponse(): QueryDepositsResponse { + return { + deposits: [], + pagination: undefined + }; +} + +export const QueryDepositsResponse = { + encode(message: QueryDepositsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.deposits) { + Deposit.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDepositsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDepositsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.deposits.push(Deposit.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDepositsResponse { + const message = createBaseQueryDepositsResponse(); + message.deposits = object.deposits?.map(e => Deposit.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryTallyResultRequest(): QueryTallyResultRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryTallyResultRequest = { + encode(message: QueryTallyResultRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTallyResultRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTallyResultRequest { + const message = createBaseQueryTallyResultRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryTallyResultResponse(): QueryTallyResultResponse { + return { + tally: undefined + }; +} + +export const QueryTallyResultResponse = { + encode(message: QueryTallyResultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tally !== undefined) { + TallyResult.encode(message.tally, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryTallyResultResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tally = TallyResult.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTallyResultResponse { + const message = createBaseQueryTallyResultResponse(); + message.tally = object.tally !== undefined && object.tally !== null ? TallyResult.fromPartial(object.tally) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/gov/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..4e4cc252 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,58 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSubmitProposal, MsgSubmitProposalResponse, MsgVote, MsgVoteResponse, MsgVoteWeighted, MsgVoteWeightedResponse, MsgDeposit, MsgDepositResponse } from "./tx"; +/** Msg defines the bank Msg service. */ + +export interface Msg { + /** SubmitProposal defines a method to create new proposal given a content. */ + submitProposal(request: MsgSubmitProposal): Promise; + /** Vote defines a method to add a vote on a specific proposal. */ + + vote(request: MsgVote): Promise; + /** + * VoteWeighted defines a method to add a weighted vote on a specific proposal. + * + * Since: cosmos-sdk 0.43 + */ + + voteWeighted(request: MsgVoteWeighted): Promise; + /** Deposit defines a method to add deposit on a specific proposal. */ + + deposit(request: MsgDeposit): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.submitProposal = this.submitProposal.bind(this); + this.vote = this.vote.bind(this); + this.voteWeighted = this.voteWeighted.bind(this); + this.deposit = this.deposit.bind(this); + } + + submitProposal(request: MsgSubmitProposal): Promise { + const data = MsgSubmitProposal.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Msg", "SubmitProposal", data); + return promise.then(data => MsgSubmitProposalResponse.decode(new _m0.Reader(data))); + } + + vote(request: MsgVote): Promise { + const data = MsgVote.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Msg", "Vote", data); + return promise.then(data => MsgVoteResponse.decode(new _m0.Reader(data))); + } + + voteWeighted(request: MsgVoteWeighted): Promise { + const data = MsgVoteWeighted.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Msg", "VoteWeighted", data); + return promise.then(data => MsgVoteWeightedResponse.decode(new _m0.Reader(data))); + } + + deposit(request: MsgDeposit): Promise { + const data = MsgDeposit.encode(request).finish(); + const promise = this.rpc.request("cosmos.gov.v1beta1.Msg", "Deposit", data); + return promise.then(data => MsgDepositResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/gov/v1beta1/tx.ts b/packages/codegen/src/cosmos/gov/v1beta1/tx.ts new file mode 100644 index 00000000..c89eef26 --- /dev/null +++ b/packages/codegen/src/cosmos/gov/v1beta1/tx.ts @@ -0,0 +1,518 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; 
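+// Illustrative usage sketch (Long is re-exported from ../../../helpers as imported below;
+// the voter address is a placeholder and the VoteOption member name follows the gov proto):
+//
+//   const msg = MsgVote.fromPartial({
+//     proposalId: Long.fromNumber(1),
+//     voter: "cosmos1...",                      // placeholder bech32 account address
+//     option: VoteOption.VOTE_OPTION_YES
+//   });
+//   const bytes = MsgVote.encode(msg).finish(); // Uint8Array protobuf encoding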
+import { VoteOption, WeightedVoteOption, WeightedVoteOptionSDKType } from "./gov"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ + +export interface MsgSubmitProposal { + content?: Any; + initialDeposit: Coin[]; + proposer: string; +} +/** + * MsgSubmitProposal defines an sdk.Msg type that supports submitting arbitrary + * proposal Content. + */ + +export interface MsgSubmitProposalSDKType { + content?: AnySDKType; + initial_deposit: CoinSDKType[]; + proposer: string; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponse { + proposalId: Long; +} +/** MsgSubmitProposalResponse defines the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** MsgVote defines a message to cast a vote. */ + +export interface MsgVote { + proposalId: Long; + voter: string; + option: VoteOption; +} +/** MsgVote defines a message to cast a vote. */ + +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; +} +/** MsgVoteResponse defines the Msg/Vote response type. */ + +export interface MsgVoteResponse {} +/** MsgVoteResponse defines the Msg/Vote response type. */ + +export interface MsgVoteResponseSDKType {} +/** + * MsgVoteWeighted defines a message to cast a vote. + * + * Since: cosmos-sdk 0.43 + */ + +export interface MsgVoteWeighted { + proposalId: Long; + voter: string; + options: WeightedVoteOption[]; +} +/** + * MsgVoteWeighted defines a message to cast a vote. + * + * Since: cosmos-sdk 0.43 + */ + +export interface MsgVoteWeightedSDKType { + proposal_id: Long; + voter: string; + options: WeightedVoteOptionSDKType[]; +} +/** + * MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. + * + * Since: cosmos-sdk 0.43 + */ + +export interface MsgVoteWeightedResponse {} +/** + * MsgVoteWeightedResponse defines the Msg/VoteWeighted response type. + * + * Since: cosmos-sdk 0.43 + */ + +export interface MsgVoteWeightedResponseSDKType {} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ + +export interface MsgDeposit { + proposalId: Long; + depositor: string; + amount: Coin[]; +} +/** MsgDeposit defines a message to submit a deposit to an existing proposal. */ + +export interface MsgDepositSDKType { + proposal_id: Long; + depositor: string; + amount: CoinSDKType[]; +} +/** MsgDepositResponse defines the Msg/Deposit response type. */ + +export interface MsgDepositResponse {} +/** MsgDepositResponse defines the Msg/Deposit response type. */ + +export interface MsgDepositResponseSDKType {} + +function createBaseMsgSubmitProposal(): MsgSubmitProposal { + return { + content: undefined, + initialDeposit: [], + proposer: "" + }; +} + +export const MsgSubmitProposal = { + encode(message: MsgSubmitProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.content !== undefined) { + Any.encode(message.content, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.initialDeposit) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.proposer !== "") { + writer.uint32(26).string(message.proposer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.content = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.initialDeposit.push(Coin.decode(reader, reader.uint32())); + break; + + case 3: + message.proposer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposal { + const message = createBaseMsgSubmitProposal(); + message.content = object.content !== undefined && object.content !== null ? Any.fromPartial(object.content) : undefined; + message.initialDeposit = object.initialDeposit?.map(e => Coin.fromPartial(e)) || []; + message.proposer = object.proposer ?? ""; + return message; + } + +}; + +function createBaseMsgSubmitProposalResponse(): MsgSubmitProposalResponse { + return { + proposalId: Long.UZERO + }; +} + +export const MsgSubmitProposalResponse = { + encode(message: MsgSubmitProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposalResponse { + const message = createBaseMsgSubmitProposalResponse(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgVote(): MsgVote { + return { + proposalId: Long.UZERO, + voter: "", + option: 0 + }; +} + +export const MsgVote = { + encode(message: MsgVote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.option !== 0) { + writer.uint32(24).int32(message.option); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.option = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVote { + const message = createBaseMsgVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.option = object.option ?? 
0; + return message; + } + +}; + +function createBaseMsgVoteResponse(): MsgVoteResponse { + return {}; +} + +export const MsgVoteResponse = { + encode(_: MsgVoteResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVoteResponse { + const message = createBaseMsgVoteResponse(); + return message; + } + +}; + +function createBaseMsgVoteWeighted(): MsgVoteWeighted { + return { + proposalId: Long.UZERO, + voter: "", + options: [] + }; +} + +export const MsgVoteWeighted = { + encode(message: MsgVoteWeighted, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + for (const v of message.options) { + WeightedVoteOption.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeighted { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteWeighted(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.options.push(WeightedVoteOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVoteWeighted { + const message = createBaseMsgVoteWeighted(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.options = object.options?.map(e => WeightedVoteOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgVoteWeightedResponse(): MsgVoteWeightedResponse { + return {}; +} + +export const MsgVoteWeightedResponse = { + encode(_: MsgVoteWeightedResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteWeightedResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgVoteWeightedResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVoteWeightedResponse { + const message = createBaseMsgVoteWeightedResponse(); + return message; + } + +}; + +function createBaseMsgDeposit(): MsgDeposit { + return { + proposalId: Long.UZERO, + depositor: "", + amount: [] + }; +} + +export const MsgDeposit = { + encode(message: MsgDeposit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.depositor !== "") { + writer.uint32(18).string(message.depositor); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeposit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeposit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.depositor = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDeposit { + const message = createBaseMsgDeposit(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.depositor = object.depositor ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgDepositResponse(): MsgDepositResponse { + return {}; +} + +export const MsgDepositResponse = { + encode(_: MsgDepositResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDepositResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDepositResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgDepositResponse { + const message = createBaseMsgDepositResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/events.ts b/packages/codegen/src/cosmos/group/v1/events.ts new file mode 100644 index 00000000..34f5a304 --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/events.ts @@ -0,0 +1,535 @@ +import { ProposalExecutorResult } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** EventCreateGroup is an event emitted when a group is created. */ + +export interface EventCreateGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** EventCreateGroup is an event emitted when a group is created. 
*/ + +export interface EventCreateGroupSDKType { + group_id: Long; +} +/** EventUpdateGroup is an event emitted when a group is updated. */ + +export interface EventUpdateGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** EventUpdateGroup is an event emitted when a group is updated. */ + +export interface EventUpdateGroupSDKType { + group_id: Long; +} +/** EventCreateGroupPolicy is an event emitted when a group policy is created. */ + +export interface EventCreateGroupPolicy { + /** address is the account address of the group policy. */ + address: string; +} +/** EventCreateGroupPolicy is an event emitted when a group policy is created. */ + +export interface EventCreateGroupPolicySDKType { + address: string; +} +/** EventUpdateGroupPolicy is an event emitted when a group policy is updated. */ + +export interface EventUpdateGroupPolicy { + /** address is the account address of the group policy. */ + address: string; +} +/** EventUpdateGroupPolicy is an event emitted when a group policy is updated. */ + +export interface EventUpdateGroupPolicySDKType { + address: string; +} +/** EventSubmitProposal is an event emitted when a proposal is created. */ + +export interface EventSubmitProposal { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventSubmitProposal is an event emitted when a proposal is created. */ + +export interface EventSubmitProposalSDKType { + proposal_id: Long; +} +/** EventWithdrawProposal is an event emitted when a proposal is withdrawn. */ + +export interface EventWithdrawProposal { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventWithdrawProposal is an event emitted when a proposal is withdrawn. */ + +export interface EventWithdrawProposalSDKType { + proposal_id: Long; +} +/** EventVote is an event emitted when a voter votes on a proposal. */ + +export interface EventVote { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; +} +/** EventVote is an event emitted when a voter votes on a proposal. */ + +export interface EventVoteSDKType { + proposal_id: Long; +} +/** EventExec is an event emitted when a proposal is executed. */ + +export interface EventExec { + /** proposal_id is the unique ID of the proposal. */ + proposalId: Long; + /** result is the proposal execution result. */ + + result: ProposalExecutorResult; +} +/** EventExec is an event emitted when a proposal is executed. */ + +export interface EventExecSDKType { + proposal_id: Long; + result: ProposalExecutorResult; +} +/** EventLeaveGroup is an event emitted when group member leaves the group. */ + +export interface EventLeaveGroup { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** address is the account address of the group member. */ + + address: string; +} +/** EventLeaveGroup is an event emitted when group member leaves the group. */ + +export interface EventLeaveGroupSDKType { + group_id: Long; + address: string; +} + +function createBaseEventCreateGroup(): EventCreateGroup { + return { + groupId: Long.UZERO + }; +} + +export const EventCreateGroup = { + encode(message: EventCreateGroup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventCreateGroup { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventCreateGroup(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventCreateGroup { + const message = createBaseEventCreateGroup(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + return message; + } + +}; + +function createBaseEventUpdateGroup(): EventUpdateGroup { + return { + groupId: Long.UZERO + }; +} + +export const EventUpdateGroup = { + encode(message: EventUpdateGroup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventUpdateGroup { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventUpdateGroup(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventUpdateGroup { + const message = createBaseEventUpdateGroup(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + return message; + } + +}; + +function createBaseEventCreateGroupPolicy(): EventCreateGroupPolicy { + return { + address: "" + }; +} + +export const EventCreateGroupPolicy = { + encode(message: EventCreateGroupPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventCreateGroupPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventCreateGroupPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventCreateGroupPolicy { + const message = createBaseEventCreateGroupPolicy(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseEventUpdateGroupPolicy(): EventUpdateGroupPolicy { + return { + address: "" + }; +} + +export const EventUpdateGroupPolicy = { + encode(message: EventUpdateGroupPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventUpdateGroupPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventUpdateGroupPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventUpdateGroupPolicy { + const message = createBaseEventUpdateGroupPolicy(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseEventSubmitProposal(): EventSubmitProposal { + return { + proposalId: Long.UZERO + }; +} + +export const EventSubmitProposal = { + encode(message: EventSubmitProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventSubmitProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventSubmitProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventSubmitProposal { + const message = createBaseEventSubmitProposal(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseEventWithdrawProposal(): EventWithdrawProposal { + return { + proposalId: Long.UZERO + }; +} + +export const EventWithdrawProposal = { + encode(message: EventWithdrawProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventWithdrawProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventWithdrawProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventWithdrawProposal { + const message = createBaseEventWithdrawProposal(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseEventVote(): EventVote { + return { + proposalId: Long.UZERO + }; +} + +export const EventVote = { + encode(message: EventVote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventVote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventVote { + const message = createBaseEventVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseEventExec(): EventExec { + return { + proposalId: Long.UZERO, + result: 0 + }; +} + +export const EventExec = { + encode(message: EventExec, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.result !== 0) { + writer.uint32(16).int32(message.result); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventExec { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventExec(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.result = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventExec { + const message = createBaseEventExec(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.result = object.result ?? 0; + return message; + } + +}; + +function createBaseEventLeaveGroup(): EventLeaveGroup { + return { + groupId: Long.UZERO, + address: "" + }; +} + +export const EventLeaveGroup = { + encode(message: EventLeaveGroup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventLeaveGroup { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventLeaveGroup(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + case 2: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventLeaveGroup { + const message = createBaseEventLeaveGroup(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.address = object.address ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/genesis.ts b/packages/codegen/src/cosmos/group/v1/genesis.ts new file mode 100644 index 00000000..290cc51c --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/genesis.ts @@ -0,0 +1,166 @@ +import { GroupInfo, GroupInfoSDKType, GroupMember, GroupMemberSDKType, GroupPolicyInfo, GroupPolicyInfoSDKType, Proposal, ProposalSDKType, Vote, VoteSDKType } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the group module's genesis state. */ + +export interface GenesisState { + /** + * group_seq is the group table orm.Sequence, + * it is used to get the next group ID. + */ + groupSeq: Long; + /** groups is the list of groups info. */ + + groups: GroupInfo[]; + /** group_members is the list of groups members. */ + + groupMembers: GroupMember[]; + /** + * group_policy_seq is the group policy table orm.Sequence, + * it is used to generate the next group policy account address. + */ + + groupPolicySeq: Long; + /** group_policies is the list of group policies info. */ + + groupPolicies: GroupPolicyInfo[]; + /** + * proposal_seq is the proposal table orm.Sequence, + * it is used to get the next proposal ID. + */ + + proposalSeq: Long; + /** proposals is the list of proposals. */ + + proposals: Proposal[]; + /** votes is the list of votes. */ + + votes: Vote[]; +} +/** GenesisState defines the group module's genesis state. */ + +export interface GenesisStateSDKType { + group_seq: Long; + groups: GroupInfoSDKType[]; + group_members: GroupMemberSDKType[]; + group_policy_seq: Long; + group_policies: GroupPolicyInfoSDKType[]; + proposal_seq: Long; + proposals: ProposalSDKType[]; + votes: VoteSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + groupSeq: Long.UZERO, + groups: [], + groupMembers: [], + groupPolicySeq: Long.UZERO, + groupPolicies: [], + proposalSeq: Long.UZERO, + proposals: [], + votes: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupSeq.isZero()) { + writer.uint32(8).uint64(message.groupSeq); + } + + for (const v of message.groups) { + GroupInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.groupMembers) { + GroupMember.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (!message.groupPolicySeq.isZero()) { + writer.uint32(32).uint64(message.groupPolicySeq); + } + + for (const v of message.groupPolicies) { + GroupPolicyInfo.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + if (!message.proposalSeq.isZero()) { + writer.uint32(48).uint64(message.proposalSeq); + } + + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupSeq = (reader.uint64() as Long); + break; + + case 2: + message.groups.push(GroupInfo.decode(reader, reader.uint32())); + break; + + case 3: + message.groupMembers.push(GroupMember.decode(reader, reader.uint32())); + break; + + case 4: + message.groupPolicySeq = (reader.uint64() as Long); + break; + + case 5: + message.groupPolicies.push(GroupPolicyInfo.decode(reader, reader.uint32())); + break; + + case 6: + message.proposalSeq = (reader.uint64() as Long); + break; + + case 7: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 8: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.groupSeq = object.groupSeq !== undefined && object.groupSeq !== null ? Long.fromValue(object.groupSeq) : Long.UZERO; + message.groups = object.groups?.map(e => GroupInfo.fromPartial(e)) || []; + message.groupMembers = object.groupMembers?.map(e => GroupMember.fromPartial(e)) || []; + message.groupPolicySeq = object.groupPolicySeq !== undefined && object.groupPolicySeq !== null ? Long.fromValue(object.groupPolicySeq) : Long.UZERO; + message.groupPolicies = object.groupPolicies?.map(e => GroupPolicyInfo.fromPartial(e)) || []; + message.proposalSeq = object.proposalSeq !== undefined && object.proposalSeq !== null ? Long.fromValue(object.proposalSeq) : Long.UZERO; + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/query.lcd.ts b/packages/codegen/src/cosmos/group/v1/query.lcd.ts new file mode 100644 index 00000000..dae3205d --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/query.lcd.ts @@ -0,0 +1,183 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryGroupInfoRequest, QueryGroupInfoResponseSDKType, QueryGroupPolicyInfoRequest, QueryGroupPolicyInfoResponseSDKType, QueryGroupMembersRequest, QueryGroupMembersResponseSDKType, QueryGroupsByAdminRequest, QueryGroupsByAdminResponseSDKType, QueryGroupPoliciesByGroupRequest, QueryGroupPoliciesByGroupResponseSDKType, QueryGroupPoliciesByAdminRequest, QueryGroupPoliciesByAdminResponseSDKType, QueryProposalRequest, QueryProposalResponseSDKType, QueryProposalsByGroupPolicyRequest, QueryProposalsByGroupPolicyResponseSDKType, QueryVoteByProposalVoterRequest, QueryVoteByProposalVoterResponseSDKType, QueryVotesByProposalRequest, QueryVotesByProposalResponseSDKType, QueryVotesByVoterRequest, QueryVotesByVoterResponseSDKType, QueryGroupsByMemberRequest, QueryGroupsByMemberResponseSDKType, QueryTallyResultRequest, QueryTallyResultResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.groupInfo = this.groupInfo.bind(this); + this.groupPolicyInfo = this.groupPolicyInfo.bind(this); + this.groupMembers = this.groupMembers.bind(this); + this.groupsByAdmin = this.groupsByAdmin.bind(this); + this.groupPoliciesByGroup = this.groupPoliciesByGroup.bind(this); + 
this.groupPoliciesByAdmin = this.groupPoliciesByAdmin.bind(this); + this.proposal = this.proposal.bind(this); + this.proposalsByGroupPolicy = this.proposalsByGroupPolicy.bind(this); + this.voteByProposalVoter = this.voteByProposalVoter.bind(this); + this.votesByProposal = this.votesByProposal.bind(this); + this.votesByVoter = this.votesByVoter.bind(this); + this.groupsByMember = this.groupsByMember.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + /* GroupInfo queries group info based on group id. */ + + + async groupInfo(params: QueryGroupInfoRequest): Promise { + const endpoint = `cosmos/group/v1/group_info/${params.groupId}`; + return await this.req.get(endpoint); + } + /* GroupPolicyInfo queries group policy info based on account address of group policy. */ + + + async groupPolicyInfo(params: QueryGroupPolicyInfoRequest): Promise { + const endpoint = `cosmos/group/v1/group_policy_info/${params.address}`; + return await this.req.get(endpoint); + } + /* GroupMembers queries members of a group */ + + + async groupMembers(params: QueryGroupMembersRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/group_members/${params.groupId}`; + return await this.req.get(endpoint, options); + } + /* GroupsByAdmin queries groups by admin address. */ + + + async groupsByAdmin(params: QueryGroupsByAdminRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/groups_by_admin/${params.admin}`; + return await this.req.get(endpoint, options); + } + /* GroupPoliciesByGroup queries group policies by group id. */ + + + async groupPoliciesByGroup(params: QueryGroupPoliciesByGroupRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/group_policies_by_group/${params.groupId}`; + return await this.req.get(endpoint, options); + } + /* GroupsByAdmin queries group policies by admin address. */ + + + async groupPoliciesByAdmin(params: QueryGroupPoliciesByAdminRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/group_policies_by_admin/${params.admin}`; + return await this.req.get(endpoint, options); + } + /* Proposal queries a proposal based on proposal id. */ + + + async proposal(params: QueryProposalRequest): Promise { + const endpoint = `cosmos/group/v1/proposal/${params.proposalId}`; + return await this.req.get(endpoint); + } + /* ProposalsByGroupPolicy queries proposals based on account address of group policy. */ + + + async proposalsByGroupPolicy(params: QueryProposalsByGroupPolicyRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/proposals_by_group_policy/${params.address}`; + return await this.req.get(endpoint, options); + } + /* VoteByProposalVoter queries a vote by proposal id and voter. 
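+ *
+ * A hypothetical usage sketch: lcdClient is assumed to be an LCDClient instance
+ * from "@osmonauts/lcd" configured elsewhere, and the voter address is a placeholder.
+ *
+ *   const queryClient = new LCDQueryClient({ requestClient: lcdClient });
+ *   const res = await queryClient.voteByProposalVoter({
+ *     proposalId: Long.fromNumber(1),
+ *     voter: "cosmos1examplevoteraddress"
+ *   });
+ *   // res.vote holds the VoteSDKType returned by the REST endpoint, if any.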
*/ + + + async voteByProposalVoter(params: QueryVoteByProposalVoterRequest): Promise { + const endpoint = `cosmos/group/v1/vote_by_proposal_voter/${params.proposalId}/${params.voter}`; + return await this.req.get(endpoint); + } + /* VotesByProposal queries a vote by proposal. */ + + + async votesByProposal(params: QueryVotesByProposalRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/votes_by_proposal/${params.proposalId}`; + return await this.req.get(endpoint, options); + } + /* VotesByVoter queries a vote by voter. */ + + + async votesByVoter(params: QueryVotesByVoterRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/votes_by_voter/${params.voter}`; + return await this.req.get(endpoint, options); + } + /* GroupsByMember queries groups by member address. */ + + + async groupsByMember(params: QueryGroupsByMemberRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/group/v1/groups_by_member/${params.address}`; + return await this.req.get(endpoint, options); + } + /* TallyResult queries the tally of a proposal votes. */ + + + async tallyResult(params: QueryTallyResultRequest): Promise { + const endpoint = `cosmos/group/v1/proposals/${params.proposalId}/tally`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/query.rpc.Query.ts b/packages/codegen/src/cosmos/group/v1/query.rpc.Query.ts new file mode 100644 index 00000000..27d76df9 --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/query.rpc.Query.ts @@ -0,0 +1,203 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryGroupInfoRequest, QueryGroupInfoResponse, QueryGroupPolicyInfoRequest, QueryGroupPolicyInfoResponse, QueryGroupMembersRequest, QueryGroupMembersResponse, QueryGroupsByAdminRequest, QueryGroupsByAdminResponse, QueryGroupPoliciesByGroupRequest, QueryGroupPoliciesByGroupResponse, QueryGroupPoliciesByAdminRequest, QueryGroupPoliciesByAdminResponse, QueryProposalRequest, QueryProposalResponse, QueryProposalsByGroupPolicyRequest, QueryProposalsByGroupPolicyResponse, QueryVoteByProposalVoterRequest, QueryVoteByProposalVoterResponse, QueryVotesByProposalRequest, QueryVotesByProposalResponse, QueryVotesByVoterRequest, QueryVotesByVoterResponse, QueryGroupsByMemberRequest, QueryGroupsByMemberResponse, QueryTallyResultRequest, QueryTallyResultResponse } from "./query"; +/** Query is the cosmos.group.v1 Query service. */ + +export interface Query { + /** GroupInfo queries group info based on group id. */ + groupInfo(request: QueryGroupInfoRequest): Promise; + /** GroupPolicyInfo queries group policy info based on account address of group policy. */ + + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise; + /** GroupMembers queries members of a group */ + + groupMembers(request: QueryGroupMembersRequest): Promise; + /** GroupsByAdmin queries groups by admin address. 
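+ *
+ * A hypothetical wiring sketch: Tendermint34Client comes from "@cosmjs/tendermint-rpc"
+ * (not imported in this file), and the RPC URL and admin address are placeholders.
+ *
+ *   const tmClient = await Tendermint34Client.connect("http://localhost:26657");
+ *   const queryClient = QueryClient.withExtensions(tmClient, createRpcQueryExtension);
+ *   const { groups } = await queryClient.groupsByAdmin({
+ *     admin: "cosmos1exampleadmin",
+ *     pagination: undefined
+ *   });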
*/ + + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise; + /** GroupPoliciesByGroup queries group policies by group id. */ + + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise; + /** GroupsByAdmin queries group policies by admin address. */ + + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): Promise; + /** Proposal queries a proposal based on proposal id. */ + + proposal(request: QueryProposalRequest): Promise; + /** ProposalsByGroupPolicy queries proposals based on account address of group policy. */ + + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise; + /** VoteByProposalVoter queries a vote by proposal id and voter. */ + + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise; + /** VotesByProposal queries a vote by proposal. */ + + votesByProposal(request: QueryVotesByProposalRequest): Promise; + /** VotesByVoter queries a vote by voter. */ + + votesByVoter(request: QueryVotesByVoterRequest): Promise; + /** GroupsByMember queries groups by member address. */ + + groupsByMember(request: QueryGroupsByMemberRequest): Promise; + /** TallyResult queries the tally of a proposal votes. */ + + tallyResult(request: QueryTallyResultRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.groupInfo = this.groupInfo.bind(this); + this.groupPolicyInfo = this.groupPolicyInfo.bind(this); + this.groupMembers = this.groupMembers.bind(this); + this.groupsByAdmin = this.groupsByAdmin.bind(this); + this.groupPoliciesByGroup = this.groupPoliciesByGroup.bind(this); + this.groupPoliciesByAdmin = this.groupPoliciesByAdmin.bind(this); + this.proposal = this.proposal.bind(this); + this.proposalsByGroupPolicy = this.proposalsByGroupPolicy.bind(this); + this.voteByProposalVoter = this.voteByProposalVoter.bind(this); + this.votesByProposal = this.votesByProposal.bind(this); + this.votesByVoter = this.votesByVoter.bind(this); + this.groupsByMember = this.groupsByMember.bind(this); + this.tallyResult = this.tallyResult.bind(this); + } + + groupInfo(request: QueryGroupInfoRequest): Promise { + const data = QueryGroupInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupInfo", data); + return promise.then(data => QueryGroupInfoResponse.decode(new _m0.Reader(data))); + } + + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise { + const data = QueryGroupPolicyInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupPolicyInfo", data); + return promise.then(data => QueryGroupPolicyInfoResponse.decode(new _m0.Reader(data))); + } + + groupMembers(request: QueryGroupMembersRequest): Promise { + const data = QueryGroupMembersRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupMembers", data); + return promise.then(data => QueryGroupMembersResponse.decode(new _m0.Reader(data))); + } + + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise { + const data = QueryGroupsByAdminRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupsByAdmin", data); + return promise.then(data => QueryGroupsByAdminResponse.decode(new _m0.Reader(data))); + } + + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise { + const data = QueryGroupPoliciesByGroupRequest.encode(request).finish(); + const promise = 
this.rpc.request("cosmos.group.v1.Query", "GroupPoliciesByGroup", data); + return promise.then(data => QueryGroupPoliciesByGroupResponse.decode(new _m0.Reader(data))); + } + + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): Promise { + const data = QueryGroupPoliciesByAdminRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupPoliciesByAdmin", data); + return promise.then(data => QueryGroupPoliciesByAdminResponse.decode(new _m0.Reader(data))); + } + + proposal(request: QueryProposalRequest): Promise { + const data = QueryProposalRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "Proposal", data); + return promise.then(data => QueryProposalResponse.decode(new _m0.Reader(data))); + } + + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise { + const data = QueryProposalsByGroupPolicyRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "ProposalsByGroupPolicy", data); + return promise.then(data => QueryProposalsByGroupPolicyResponse.decode(new _m0.Reader(data))); + } + + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise { + const data = QueryVoteByProposalVoterRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "VoteByProposalVoter", data); + return promise.then(data => QueryVoteByProposalVoterResponse.decode(new _m0.Reader(data))); + } + + votesByProposal(request: QueryVotesByProposalRequest): Promise { + const data = QueryVotesByProposalRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "VotesByProposal", data); + return promise.then(data => QueryVotesByProposalResponse.decode(new _m0.Reader(data))); + } + + votesByVoter(request: QueryVotesByVoterRequest): Promise { + const data = QueryVotesByVoterRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "VotesByVoter", data); + return promise.then(data => QueryVotesByVoterResponse.decode(new _m0.Reader(data))); + } + + groupsByMember(request: QueryGroupsByMemberRequest): Promise { + const data = QueryGroupsByMemberRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "GroupsByMember", data); + return promise.then(data => QueryGroupsByMemberResponse.decode(new _m0.Reader(data))); + } + + tallyResult(request: QueryTallyResultRequest): Promise { + const data = QueryTallyResultRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Query", "TallyResult", data); + return promise.then(data => QueryTallyResultResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + groupInfo(request: QueryGroupInfoRequest): Promise { + return queryService.groupInfo(request); + }, + + groupPolicyInfo(request: QueryGroupPolicyInfoRequest): Promise { + return queryService.groupPolicyInfo(request); + }, + + groupMembers(request: QueryGroupMembersRequest): Promise { + return queryService.groupMembers(request); + }, + + groupsByAdmin(request: QueryGroupsByAdminRequest): Promise { + return queryService.groupsByAdmin(request); + }, + + groupPoliciesByGroup(request: QueryGroupPoliciesByGroupRequest): Promise { + return queryService.groupPoliciesByGroup(request); + }, + + groupPoliciesByAdmin(request: QueryGroupPoliciesByAdminRequest): 
Promise { + return queryService.groupPoliciesByAdmin(request); + }, + + proposal(request: QueryProposalRequest): Promise { + return queryService.proposal(request); + }, + + proposalsByGroupPolicy(request: QueryProposalsByGroupPolicyRequest): Promise { + return queryService.proposalsByGroupPolicy(request); + }, + + voteByProposalVoter(request: QueryVoteByProposalVoterRequest): Promise { + return queryService.voteByProposalVoter(request); + }, + + votesByProposal(request: QueryVotesByProposalRequest): Promise { + return queryService.votesByProposal(request); + }, + + votesByVoter(request: QueryVotesByVoterRequest): Promise { + return queryService.votesByVoter(request); + }, + + groupsByMember(request: QueryGroupsByMemberRequest): Promise { + return queryService.groupsByMember(request); + }, + + tallyResult(request: QueryTallyResultRequest): Promise { + return queryService.tallyResult(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/query.ts b/packages/codegen/src/cosmos/group/v1/query.ts new file mode 100644 index 00000000..b3069e80 --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/query.ts @@ -0,0 +1,1698 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { GroupInfo, GroupInfoSDKType, GroupPolicyInfo, GroupPolicyInfoSDKType, GroupMember, GroupMemberSDKType, Proposal, ProposalSDKType, Vote, VoteSDKType, TallyResult, TallyResultSDKType } from "./types"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** QueryGroupInfoRequest is the Query/GroupInfo request type. */ + +export interface QueryGroupInfoRequest { + /** group_id is the unique ID of the group. */ + groupId: Long; +} +/** QueryGroupInfoRequest is the Query/GroupInfo request type. */ + +export interface QueryGroupInfoRequestSDKType { + group_id: Long; +} +/** QueryGroupInfoResponse is the Query/GroupInfo response type. */ + +export interface QueryGroupInfoResponse { + /** info is the GroupInfo for the group. */ + info?: GroupInfo; +} +/** QueryGroupInfoResponse is the Query/GroupInfo response type. */ + +export interface QueryGroupInfoResponseSDKType { + info?: GroupInfoSDKType; +} +/** QueryGroupPolicyInfoRequest is the Query/GroupPolicyInfo request type. */ + +export interface QueryGroupPolicyInfoRequest { + /** address is the account address of the group policy. */ + address: string; +} +/** QueryGroupPolicyInfoRequest is the Query/GroupPolicyInfo request type. */ + +export interface QueryGroupPolicyInfoRequestSDKType { + address: string; +} +/** QueryGroupPolicyInfoResponse is the Query/GroupPolicyInfo response type. */ + +export interface QueryGroupPolicyInfoResponse { + /** info is the GroupPolicyInfo for the group policy. */ + info?: GroupPolicyInfo; +} +/** QueryGroupPolicyInfoResponse is the Query/GroupPolicyInfo response type. */ + +export interface QueryGroupPolicyInfoResponseSDKType { + info?: GroupPolicyInfoSDKType; +} +/** QueryGroupMembersRequest is the Query/GroupMembers request type. */ + +export interface QueryGroupMembersRequest { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGroupMembersRequest is the Query/GroupMembers request type. 
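+ *
+ * A construction sketch (the groupId value is illustrative): fromPartial fills in
+ * defaults, and encode(...).finish() produces the protobuf bytes that the generated
+ * RPC client sends for Query/GroupMembers.
+ *
+ *   const request = QueryGroupMembersRequest.fromPartial({ groupId: Long.fromNumber(3) });
+ *   const bytes = QueryGroupMembersRequest.encode(request).finish(); // Uint8Array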
*/ + +export interface QueryGroupMembersRequestSDKType { + group_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryGroupMembersResponse is the Query/GroupMembersResponse response type. */ + +export interface QueryGroupMembersResponse { + /** members are the members of the group with given group_id. */ + members: GroupMember[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryGroupMembersResponse is the Query/GroupMembersResponse response type. */ + +export interface QueryGroupMembersResponseSDKType { + members: GroupMemberSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupsByAdminRequest is the Query/GroupsByAdmin request type. */ + +export interface QueryGroupsByAdminRequest { + /** admin is the account address of a group's admin. */ + admin: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGroupsByAdminRequest is the Query/GroupsByAdmin request type. */ + +export interface QueryGroupsByAdminRequestSDKType { + admin: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupsByAdminResponse is the Query/GroupsByAdminResponse response type. */ + +export interface QueryGroupsByAdminResponse { + /** groups are the groups info with the provided admin. */ + groups: GroupInfo[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryGroupsByAdminResponse is the Query/GroupsByAdminResponse response type. */ + +export interface QueryGroupsByAdminResponseSDKType { + groups: GroupInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupPoliciesByGroupRequest is the Query/GroupPoliciesByGroup request type. */ + +export interface QueryGroupPoliciesByGroupRequest { + /** group_id is the unique ID of the group policy's group. */ + groupId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGroupPoliciesByGroupRequest is the Query/GroupPoliciesByGroup request type. */ + +export interface QueryGroupPoliciesByGroupRequestSDKType { + group_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryGroupPoliciesByGroupResponse is the Query/GroupPoliciesByGroup response type. */ + +export interface QueryGroupPoliciesByGroupResponse { + /** group_policies are the group policies info associated with the provided group. */ + groupPolicies: GroupPolicyInfo[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryGroupPoliciesByGroupResponse is the Query/GroupPoliciesByGroup response type. */ + +export interface QueryGroupPoliciesByGroupResponseSDKType { + group_policies: GroupPolicyInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupPoliciesByAdminRequest is the Query/GroupPoliciesByAdmin request type. */ + +export interface QueryGroupPoliciesByAdminRequest { + /** admin is the admin address of the group policy. */ + admin: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGroupPoliciesByAdminRequest is the Query/GroupPoliciesByAdmin request type. */ + +export interface QueryGroupPoliciesByAdminRequestSDKType { + admin: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupPoliciesByAdminResponse is the Query/GroupPoliciesByAdmin response type. */ + +export interface QueryGroupPoliciesByAdminResponse { + /** group_policies are the group policies info with provided admin. 
*/ + groupPolicies: GroupPolicyInfo[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryGroupPoliciesByAdminResponse is the Query/GroupPoliciesByAdmin response type. */ + +export interface QueryGroupPoliciesByAdminResponseSDKType { + group_policies: GroupPolicyInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryProposalRequest is the Query/Proposal request type. */ + +export interface QueryProposalRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; +} +/** QueryProposalRequest is the Query/Proposal request type. */ + +export interface QueryProposalRequestSDKType { + proposal_id: Long; +} +/** QueryProposalResponse is the Query/Proposal response type. */ + +export interface QueryProposalResponse { + /** proposal is the proposal info. */ + proposal?: Proposal; +} +/** QueryProposalResponse is the Query/Proposal response type. */ + +export interface QueryProposalResponseSDKType { + proposal?: ProposalSDKType; +} +/** QueryProposalsByGroupPolicyRequest is the Query/ProposalByGroupPolicy request type. */ + +export interface QueryProposalsByGroupPolicyRequest { + /** address is the account address of the group policy related to proposals. */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryProposalsByGroupPolicyRequest is the Query/ProposalByGroupPolicy request type. */ + +export interface QueryProposalsByGroupPolicyRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** QueryProposalsByGroupPolicyResponse is the Query/ProposalByGroupPolicy response type. */ + +export interface QueryProposalsByGroupPolicyResponse { + /** proposals are the proposals with given group policy. */ + proposals: Proposal[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryProposalsByGroupPolicyResponse is the Query/ProposalByGroupPolicy response type. */ + +export interface QueryProposalsByGroupPolicyResponseSDKType { + proposals: ProposalSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVoteByProposalVoterRequest is the Query/VoteByProposalVoter request type. */ + +export interface QueryVoteByProposalVoterRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; + /** voter is a proposal voter account address. */ + + voter: string; +} +/** QueryVoteByProposalVoterRequest is the Query/VoteByProposalVoter request type. */ + +export interface QueryVoteByProposalVoterRequestSDKType { + proposal_id: Long; + voter: string; +} +/** QueryVoteByProposalVoterResponse is the Query/VoteByProposalVoter response type. */ + +export interface QueryVoteByProposalVoterResponse { + /** vote is the vote with given proposal_id and voter. */ + vote?: Vote; +} +/** QueryVoteByProposalVoterResponse is the Query/VoteByProposalVoter response type. */ + +export interface QueryVoteByProposalVoterResponseSDKType { + vote?: VoteSDKType; +} +/** QueryVotesByProposalRequest is the Query/VotesByProposal request type. */ + +export interface QueryVotesByProposalRequest { + /** proposal_id is the unique ID of a proposal. */ + proposalId: Long; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryVotesByProposalRequest is the Query/VotesByProposal request type. 
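+ *
+ * A pagination sketch (the limit value is illustrative): fromPartial accepts a nested
+ * partial PageRequest and completes it via PageRequest.fromPartial.
+ *
+ *   const request = QueryVotesByProposalRequest.fromPartial({
+ *     proposalId: Long.fromNumber(12),
+ *     pagination: { limit: Long.fromNumber(50) }
+ *   });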
*/ + +export interface QueryVotesByProposalRequestSDKType { + proposal_id: Long; + pagination?: PageRequestSDKType; +} +/** QueryVotesByProposalResponse is the Query/VotesByProposal response type. */ + +export interface QueryVotesByProposalResponse { + /** votes are the list of votes for given proposal_id. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryVotesByProposalResponse is the Query/VotesByProposal response type. */ + +export interface QueryVotesByProposalResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryVotesByVoterRequest is the Query/VotesByVoter request type. */ + +export interface QueryVotesByVoterRequest { + /** voter is a proposal voter account address. */ + voter: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryVotesByVoterRequest is the Query/VotesByVoter request type. */ + +export interface QueryVotesByVoterRequestSDKType { + voter: string; + pagination?: PageRequestSDKType; +} +/** QueryVotesByVoterResponse is the Query/VotesByVoter response type. */ + +export interface QueryVotesByVoterResponse { + /** votes are the list of votes by given voter. */ + votes: Vote[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryVotesByVoterResponse is the Query/VotesByVoter response type. */ + +export interface QueryVotesByVoterResponseSDKType { + votes: VoteSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryGroupsByMemberRequest is the Query/GroupsByMember request type. */ + +export interface QueryGroupsByMemberRequest { + /** address is the group member address. */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryGroupsByMemberRequest is the Query/GroupsByMember request type. */ + +export interface QueryGroupsByMemberRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** QueryGroupsByMemberResponse is the Query/GroupsByMember response type. */ + +export interface QueryGroupsByMemberResponse { + /** groups are the groups info with the provided group member. */ + groups: GroupInfo[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryGroupsByMemberResponse is the Query/GroupsByMember response type. */ + +export interface QueryGroupsByMemberResponseSDKType { + groups: GroupInfoSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryTallyResultRequest is the Query/TallyResult request type. */ + +export interface QueryTallyResultRequest { + /** proposal_id is the unique id of a proposal. */ + proposalId: Long; +} +/** QueryTallyResultRequest is the Query/TallyResult request type. */ + +export interface QueryTallyResultRequestSDKType { + proposal_id: Long; +} +/** QueryTallyResultResponse is the Query/TallyResult response type. */ + +export interface QueryTallyResultResponse { + /** tally defines the requested tally. */ + tally?: TallyResult; +} +/** QueryTallyResultResponse is the Query/TallyResult response type. 
*/ + +export interface QueryTallyResultResponseSDKType { + tally?: TallyResultSDKType; +} + +function createBaseQueryGroupInfoRequest(): QueryGroupInfoRequest { + return { + groupId: Long.UZERO + }; +} + +export const QueryGroupInfoRequest = { + encode(message: QueryGroupInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupInfoRequest { + const message = createBaseQueryGroupInfoRequest(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryGroupInfoResponse(): QueryGroupInfoResponse { + return { + info: undefined + }; +} + +export const QueryGroupInfoResponse = { + encode(message: QueryGroupInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.info !== undefined) { + GroupInfo.encode(message.info, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.info = GroupInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupInfoResponse { + const message = createBaseQueryGroupInfoResponse(); + message.info = object.info !== undefined && object.info !== null ? GroupInfo.fromPartial(object.info) : undefined; + return message; + } + +}; + +function createBaseQueryGroupPolicyInfoRequest(): QueryGroupPolicyInfoRequest { + return { + address: "" + }; +} + +export const QueryGroupPolicyInfoRequest = { + encode(message: QueryGroupPolicyInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPolicyInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupPolicyInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPolicyInfoRequest { + const message = createBaseQueryGroupPolicyInfoRequest(); + message.address = object.address ?? 
""; + return message; + } + +}; + +function createBaseQueryGroupPolicyInfoResponse(): QueryGroupPolicyInfoResponse { + return { + info: undefined + }; +} + +export const QueryGroupPolicyInfoResponse = { + encode(message: QueryGroupPolicyInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.info !== undefined) { + GroupPolicyInfo.encode(message.info, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPolicyInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupPolicyInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.info = GroupPolicyInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPolicyInfoResponse { + const message = createBaseQueryGroupPolicyInfoResponse(); + message.info = object.info !== undefined && object.info !== null ? GroupPolicyInfo.fromPartial(object.info) : undefined; + return message; + } + +}; + +function createBaseQueryGroupMembersRequest(): QueryGroupMembersRequest { + return { + groupId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryGroupMembersRequest = { + encode(message: QueryGroupMembersRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupMembersRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupMembersRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupMembersRequest { + const message = createBaseQueryGroupMembersRequest(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupMembersResponse(): QueryGroupMembersResponse { + return { + members: [], + pagination: undefined + }; +} + +export const QueryGroupMembersResponse = { + encode(message: QueryGroupMembersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.members) { + GroupMember.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupMembersResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupMembersResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.members.push(GroupMember.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupMembersResponse { + const message = createBaseQueryGroupMembersResponse(); + message.members = object.members?.map(e => GroupMember.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupsByAdminRequest(): QueryGroupsByAdminRequest { + return { + admin: "", + pagination: undefined + }; +} + +export const QueryGroupsByAdminRequest = { + encode(message: QueryGroupsByAdminRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByAdminRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupsByAdminRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupsByAdminRequest { + const message = createBaseQueryGroupsByAdminRequest(); + message.admin = object.admin ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupsByAdminResponse(): QueryGroupsByAdminResponse { + return { + groups: [], + pagination: undefined + }; +} + +export const QueryGroupsByAdminResponse = { + encode(message: QueryGroupsByAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.groups) { + GroupInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGroupsByAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groups.push(GroupInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupsByAdminResponse { + const message = createBaseQueryGroupsByAdminResponse(); + message.groups = object.groups?.map(e => GroupInfo.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupPoliciesByGroupRequest(): QueryGroupPoliciesByGroupRequest { + return { + groupId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryGroupPoliciesByGroupRequest = { + encode(message: QueryGroupPoliciesByGroupRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByGroupRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupPoliciesByGroupRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPoliciesByGroupRequest { + const message = createBaseQueryGroupPoliciesByGroupRequest(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupPoliciesByGroupResponse(): QueryGroupPoliciesByGroupResponse { + return { + groupPolicies: [], + pagination: undefined + }; +} + +export const QueryGroupPoliciesByGroupResponse = { + encode(message: QueryGroupPoliciesByGroupResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.groupPolicies) { + GroupPolicyInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByGroupResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGroupPoliciesByGroupResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupPolicies.push(GroupPolicyInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPoliciesByGroupResponse { + const message = createBaseQueryGroupPoliciesByGroupResponse(); + message.groupPolicies = object.groupPolicies?.map(e => GroupPolicyInfo.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupPoliciesByAdminRequest(): QueryGroupPoliciesByAdminRequest { + return { + admin: "", + pagination: undefined + }; +} + +export const QueryGroupPoliciesByAdminRequest = { + encode(message: QueryGroupPoliciesByAdminRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByAdminRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupPoliciesByAdminRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPoliciesByAdminRequest { + const message = createBaseQueryGroupPoliciesByAdminRequest(); + message.admin = object.admin ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupPoliciesByAdminResponse(): QueryGroupPoliciesByAdminResponse { + return { + groupPolicies: [], + pagination: undefined + }; +} + +export const QueryGroupPoliciesByAdminResponse = { + encode(message: QueryGroupPoliciesByAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.groupPolicies) { + GroupPolicyInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupPoliciesByAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGroupPoliciesByAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupPolicies.push(GroupPolicyInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupPoliciesByAdminResponse { + const message = createBaseQueryGroupPoliciesByAdminResponse(); + message.groupPolicies = object.groupPolicies?.map(e => GroupPolicyInfo.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryProposalRequest(): QueryProposalRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryProposalRequest = { + encode(message: QueryProposalRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalRequest { + const message = createBaseQueryProposalRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryProposalResponse(): QueryProposalResponse { + return { + proposal: undefined + }; +} + +export const QueryProposalResponse = { + encode(message: QueryProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposal !== undefined) { + Proposal.encode(message.proposal, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposal = Proposal.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalResponse { + const message = createBaseQueryProposalResponse(); + message.proposal = object.proposal !== undefined && object.proposal !== null ? 
Proposal.fromPartial(object.proposal) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsByGroupPolicyRequest(): QueryProposalsByGroupPolicyRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QueryProposalsByGroupPolicyRequest = { + encode(message: QueryProposalsByGroupPolicyRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsByGroupPolicyRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalsByGroupPolicyRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsByGroupPolicyRequest { + const message = createBaseQueryProposalsByGroupPolicyRequest(); + message.address = object.address ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryProposalsByGroupPolicyResponse(): QueryProposalsByGroupPolicyResponse { + return { + proposals: [], + pagination: undefined + }; +} + +export const QueryProposalsByGroupPolicyResponse = { + encode(message: QueryProposalsByGroupPolicyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposals) { + Proposal.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryProposalsByGroupPolicyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryProposalsByGroupPolicyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposals.push(Proposal.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryProposalsByGroupPolicyResponse { + const message = createBaseQueryProposalsByGroupPolicyResponse(); + message.proposals = object.proposals?.map(e => Proposal.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVoteByProposalVoterRequest(): QueryVoteByProposalVoterRequest { + return { + proposalId: Long.UZERO, + voter: "" + }; +} + +export const QueryVoteByProposalVoterRequest = { + encode(message: QueryVoteByProposalVoterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteByProposalVoterRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteByProposalVoterRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteByProposalVoterRequest { + const message = createBaseQueryVoteByProposalVoterRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + return message; + } + +}; + +function createBaseQueryVoteByProposalVoterResponse(): QueryVoteByProposalVoterResponse { + return { + vote: undefined + }; +} + +export const QueryVoteByProposalVoterResponse = { + encode(message: QueryVoteByProposalVoterResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vote !== undefined) { + Vote.encode(message.vote, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVoteByProposalVoterResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVoteByProposalVoterResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.vote = Vote.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVoteByProposalVoterResponse { + const message = createBaseQueryVoteByProposalVoterResponse(); + message.vote = object.vote !== undefined && object.vote !== null ? Vote.fromPartial(object.vote) : undefined; + return message; + } + +}; + +function createBaseQueryVotesByProposalRequest(): QueryVotesByProposalRequest { + return { + proposalId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryVotesByProposalRequest = { + encode(message: QueryVotesByProposalRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByProposalRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryVotesByProposalRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesByProposalRequest { + const message = createBaseQueryVotesByProposalRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVotesByProposalResponse(): QueryVotesByProposalResponse { + return { + votes: [], + pagination: undefined + }; +} + +export const QueryVotesByProposalResponse = { + encode(message: QueryVotesByProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVotesByProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesByProposalResponse { + const message = createBaseQueryVotesByProposalResponse(); + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVotesByVoterRequest(): QueryVotesByVoterRequest { + return { + voter: "", + pagination: undefined + }; +} + +export const QueryVotesByVoterRequest = { + encode(message: QueryVotesByVoterRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.voter !== "") { + writer.uint32(10).string(message.voter); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByVoterRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryVotesByVoterRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.voter = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesByVoterRequest { + const message = createBaseQueryVotesByVoterRequest(); + message.voter = object.voter ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryVotesByVoterResponse(): QueryVotesByVoterResponse { + return { + votes: [], + pagination: undefined + }; +} + +export const QueryVotesByVoterResponse = { + encode(message: QueryVotesByVoterResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.votes) { + Vote.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryVotesByVoterResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryVotesByVoterResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votes.push(Vote.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryVotesByVoterResponse { + const message = createBaseQueryVotesByVoterResponse(); + message.votes = object.votes?.map(e => Vote.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupsByMemberRequest(): QueryGroupsByMemberRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QueryGroupsByMemberRequest = { + encode(message: QueryGroupsByMemberRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByMemberRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupsByMemberRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupsByMemberRequest { + const message = createBaseQueryGroupsByMemberRequest(); + message.address = object.address ?? 
""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryGroupsByMemberResponse(): QueryGroupsByMemberResponse { + return { + groups: [], + pagination: undefined + }; +} + +export const QueryGroupsByMemberResponse = { + encode(message: QueryGroupsByMemberResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.groups) { + GroupInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGroupsByMemberResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGroupsByMemberResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groups.push(GroupInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryGroupsByMemberResponse { + const message = createBaseQueryGroupsByMemberResponse(); + message.groups = object.groups?.map(e => GroupInfo.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryTallyResultRequest(): QueryTallyResultRequest { + return { + proposalId: Long.UZERO + }; +} + +export const QueryTallyResultRequest = { + encode(message: QueryTallyResultRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryTallyResultRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryTallyResultRequest { + const message = createBaseQueryTallyResultRequest(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryTallyResultResponse(): QueryTallyResultResponse { + return { + tally: undefined + }; +} + +export const QueryTallyResultResponse = { + encode(message: QueryTallyResultResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tally !== undefined) { + TallyResult.encode(message.tally, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryTallyResultResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length;
+ const message = createBaseQueryTallyResultResponse();
+
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+
+ switch (tag >>> 3) {
+ case 1:
+ message.tally = TallyResult.decode(reader, reader.uint32());
+ break;
+
+ default:
+ reader.skipType(tag & 7);
+ break;
+ }
+ }
+
+ return message;
+ },
+
+ fromPartial(object: DeepPartial<QueryTallyResultResponse>): QueryTallyResultResponse {
+ const message = createBaseQueryTallyResultResponse();
+ message.tally = object.tally !== undefined && object.tally !== null ? TallyResult.fromPartial(object.tally) : undefined;
+ return message;
+ }
+
+};
\ No newline at end of file
diff --git a/packages/codegen/src/cosmos/group/v1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/group/v1/tx.rpc.msg.ts
new file mode 100644
index 00000000..b921d00c
--- /dev/null
+++ b/packages/codegen/src/cosmos/group/v1/tx.rpc.msg.ts
@@ -0,0 +1,154 @@
+import { Rpc } from "../../../helpers";
+import * as _m0 from "protobufjs/minimal";
+import { MsgCreateGroup, MsgCreateGroupResponse, MsgUpdateGroupMembers, MsgUpdateGroupMembersResponse, MsgUpdateGroupAdmin, MsgUpdateGroupAdminResponse, MsgUpdateGroupMetadata, MsgUpdateGroupMetadataResponse, MsgCreateGroupPolicy, MsgCreateGroupPolicyResponse, MsgCreateGroupWithPolicy, MsgCreateGroupWithPolicyResponse, MsgUpdateGroupPolicyAdmin, MsgUpdateGroupPolicyAdminResponse, MsgUpdateGroupPolicyDecisionPolicy, MsgUpdateGroupPolicyDecisionPolicyResponse, MsgUpdateGroupPolicyMetadata, MsgUpdateGroupPolicyMetadataResponse, MsgSubmitProposal, MsgSubmitProposalResponse, MsgWithdrawProposal, MsgWithdrawProposalResponse, MsgVote, MsgVoteResponse, MsgExec, MsgExecResponse, MsgLeaveGroup, MsgLeaveGroupResponse } from "./tx";
+/** Msg is the cosmos.group.v1 Msg service. */
+
+export interface Msg {
+ /** CreateGroup creates a new group with an admin account address, a list of members and some optional metadata. */
+ createGroup(request: MsgCreateGroup): Promise<MsgCreateGroupResponse>;
+ /** UpdateGroupMembers updates the group members with given group id and admin address. */
+
+ updateGroupMembers(request: MsgUpdateGroupMembers): Promise<MsgUpdateGroupMembersResponse>;
+ /** UpdateGroupAdmin updates the group admin with given group id and previous admin address. */
+
+ updateGroupAdmin(request: MsgUpdateGroupAdmin): Promise<MsgUpdateGroupAdminResponse>;
+ /** UpdateGroupMetadata updates the group metadata with given group id and admin address. */
+
+ updateGroupMetadata(request: MsgUpdateGroupMetadata): Promise<MsgUpdateGroupMetadataResponse>;
+ /** CreateGroupPolicy creates a new group policy using given DecisionPolicy. */
+
+ createGroupPolicy(request: MsgCreateGroupPolicy): Promise<MsgCreateGroupPolicyResponse>;
+ /** CreateGroupWithPolicy creates a new group with policy. */
+
+ createGroupWithPolicy(request: MsgCreateGroupWithPolicy): Promise<MsgCreateGroupWithPolicyResponse>;
+ /** UpdateGroupPolicyAdmin updates a group policy admin. */
+
+ updateGroupPolicyAdmin(request: MsgUpdateGroupPolicyAdmin): Promise<MsgUpdateGroupPolicyAdminResponse>;
+ /** UpdateGroupPolicyDecisionPolicy allows a group policy's decision policy to be updated. */
+
+ updateGroupPolicyDecisionPolicy(request: MsgUpdateGroupPolicyDecisionPolicy): Promise<MsgUpdateGroupPolicyDecisionPolicyResponse>;
+ /** UpdateGroupPolicyMetadata updates a group policy metadata. */
+
+ updateGroupPolicyMetadata(request: MsgUpdateGroupPolicyMetadata): Promise<MsgUpdateGroupPolicyMetadataResponse>;
+ /** SubmitProposal submits a new proposal. */
+
+ submitProposal(request: MsgSubmitProposal): Promise<MsgSubmitProposalResponse>;
+ /** WithdrawProposal aborts a proposal. */
+
+ withdrawProposal(request: MsgWithdrawProposal): Promise<MsgWithdrawProposalResponse>;
+ /** Vote allows a voter to vote on a proposal. */
+
+ vote(request: MsgVote): Promise<MsgVoteResponse>;
+ /** Exec executes a proposal. */
+
+ exec(request: MsgExec): Promise<MsgExecResponse>;
+ /** LeaveGroup allows a group member to leave the group. */
+
+ leaveGroup(request: MsgLeaveGroup): Promise<MsgLeaveGroupResponse>;
+}
+export class MsgClientImpl implements Msg {
+ private readonly rpc: Rpc;
+
+ constructor(rpc: Rpc) {
+ this.rpc = rpc;
+ this.createGroup = this.createGroup.bind(this);
+ this.updateGroupMembers = this.updateGroupMembers.bind(this);
+ this.updateGroupAdmin = this.updateGroupAdmin.bind(this);
+ this.updateGroupMetadata = this.updateGroupMetadata.bind(this);
+ this.createGroupPolicy = this.createGroupPolicy.bind(this);
+ this.createGroupWithPolicy = this.createGroupWithPolicy.bind(this);
+ this.updateGroupPolicyAdmin = this.updateGroupPolicyAdmin.bind(this);
+ this.updateGroupPolicyDecisionPolicy = this.updateGroupPolicyDecisionPolicy.bind(this);
+ this.updateGroupPolicyMetadata = this.updateGroupPolicyMetadata.bind(this);
+ this.submitProposal = this.submitProposal.bind(this);
+ this.withdrawProposal = this.withdrawProposal.bind(this);
+ this.vote = this.vote.bind(this);
+ this.exec = this.exec.bind(this);
+ this.leaveGroup = this.leaveGroup.bind(this);
+ }
+
+ createGroup(request: MsgCreateGroup): Promise<MsgCreateGroupResponse> {
+ const data = MsgCreateGroup.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "CreateGroup", data);
+ return promise.then(data => MsgCreateGroupResponse.decode(new _m0.Reader(data)));
+ }
+
+ updateGroupMembers(request: MsgUpdateGroupMembers): Promise<MsgUpdateGroupMembersResponse> {
+ const data = MsgUpdateGroupMembers.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupMembers", data);
+ return promise.then(data => MsgUpdateGroupMembersResponse.decode(new _m0.Reader(data)));
+ }
+
+ updateGroupAdmin(request: MsgUpdateGroupAdmin): Promise<MsgUpdateGroupAdminResponse> {
+ const data = MsgUpdateGroupAdmin.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupAdmin", data);
+ return promise.then(data => MsgUpdateGroupAdminResponse.decode(new _m0.Reader(data)));
+ }
+
+ updateGroupMetadata(request: MsgUpdateGroupMetadata): Promise<MsgUpdateGroupMetadataResponse> {
+ const data = MsgUpdateGroupMetadata.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupMetadata", data);
+ return promise.then(data => MsgUpdateGroupMetadataResponse.decode(new _m0.Reader(data)));
+ }
+
+ createGroupPolicy(request: MsgCreateGroupPolicy): Promise<MsgCreateGroupPolicyResponse> {
+ const data = MsgCreateGroupPolicy.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "CreateGroupPolicy", data);
+ return promise.then(data => MsgCreateGroupPolicyResponse.decode(new _m0.Reader(data)));
+ }
+
+ createGroupWithPolicy(request: MsgCreateGroupWithPolicy): Promise<MsgCreateGroupWithPolicyResponse> {
+ const data = MsgCreateGroupWithPolicy.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "CreateGroupWithPolicy", data);
+ return promise.then(data => MsgCreateGroupWithPolicyResponse.decode(new _m0.Reader(data)));
+ }
+
+ updateGroupPolicyAdmin(request: MsgUpdateGroupPolicyAdmin): Promise<MsgUpdateGroupPolicyAdminResponse> {
+ const data = MsgUpdateGroupPolicyAdmin.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupPolicyAdmin", data);
+ return promise.then(data => MsgUpdateGroupPolicyAdminResponse.decode(new _m0.Reader(data)));
+ }
+
+ updateGroupPolicyDecisionPolicy(request: MsgUpdateGroupPolicyDecisionPolicy): Promise<MsgUpdateGroupPolicyDecisionPolicyResponse> {
+ const data = MsgUpdateGroupPolicyDecisionPolicy.encode(request).finish();
+ const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupPolicyDecisionPolicy",
data); + return promise.then(data => MsgUpdateGroupPolicyDecisionPolicyResponse.decode(new _m0.Reader(data))); + } + + updateGroupPolicyMetadata(request: MsgUpdateGroupPolicyMetadata): Promise { + const data = MsgUpdateGroupPolicyMetadata.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "UpdateGroupPolicyMetadata", data); + return promise.then(data => MsgUpdateGroupPolicyMetadataResponse.decode(new _m0.Reader(data))); + } + + submitProposal(request: MsgSubmitProposal): Promise { + const data = MsgSubmitProposal.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "SubmitProposal", data); + return promise.then(data => MsgSubmitProposalResponse.decode(new _m0.Reader(data))); + } + + withdrawProposal(request: MsgWithdrawProposal): Promise { + const data = MsgWithdrawProposal.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "WithdrawProposal", data); + return promise.then(data => MsgWithdrawProposalResponse.decode(new _m0.Reader(data))); + } + + vote(request: MsgVote): Promise { + const data = MsgVote.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "Vote", data); + return promise.then(data => MsgVoteResponse.decode(new _m0.Reader(data))); + } + + exec(request: MsgExec): Promise { + const data = MsgExec.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "Exec", data); + return promise.then(data => MsgExecResponse.decode(new _m0.Reader(data))); + } + + leaveGroup(request: MsgLeaveGroup): Promise { + const data = MsgLeaveGroup.encode(request).finish(); + const promise = this.rpc.request("cosmos.group.v1.Msg", "LeaveGroup", data); + return promise.then(data => MsgLeaveGroupResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/tx.ts b/packages/codegen/src/cosmos/group/v1/tx.ts new file mode 100644 index 00000000..f37265df --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/tx.ts @@ -0,0 +1,1949 @@ +import { Member, MemberSDKType, VoteOption } from "./types"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Exec defines modes of execution of a proposal on creation or on new vote. */ + +export enum Exec { + /** + * EXEC_UNSPECIFIED - An empty value means that there should be a separate + * MsgExec request for the proposal to execute. + */ + EXEC_UNSPECIFIED = 0, + + /** + * EXEC_TRY - Try to execute the proposal immediately. + * If the proposal is not allowed per the DecisionPolicy, + * the proposal will still be open and could + * be executed at a later point. + */ + EXEC_TRY = 1, + UNRECOGNIZED = -1, +} +export const ExecSDKType = Exec; +export function execFromJSON(object: any): Exec { + switch (object) { + case 0: + case "EXEC_UNSPECIFIED": + return Exec.EXEC_UNSPECIFIED; + + case 1: + case "EXEC_TRY": + return Exec.EXEC_TRY; + + case -1: + case "UNRECOGNIZED": + default: + return Exec.UNRECOGNIZED; + } +} +export function execToJSON(object: Exec): string { + switch (object) { + case Exec.EXEC_UNSPECIFIED: + return "EXEC_UNSPECIFIED"; + + case Exec.EXEC_TRY: + return "EXEC_TRY"; + + case Exec.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** MsgCreateGroup is the Msg/CreateGroup request type. */ + +export interface MsgCreateGroup { + /** admin is the account address of the group admin. 
*/ + admin: string; + /** members defines the group members. */ + + members: Member[]; + /** metadata is any arbitrary metadata to attached to the group. */ + + metadata: string; +} +/** MsgCreateGroup is the Msg/CreateGroup request type. */ + +export interface MsgCreateGroupSDKType { + admin: string; + members: MemberSDKType[]; + metadata: string; +} +/** MsgCreateGroupResponse is the Msg/CreateGroup response type. */ + +export interface MsgCreateGroupResponse { + /** group_id is the unique ID of the newly created group. */ + groupId: Long; +} +/** MsgCreateGroupResponse is the Msg/CreateGroup response type. */ + +export interface MsgCreateGroupResponseSDKType { + group_id: Long; +} +/** MsgUpdateGroupMembers is the Msg/UpdateGroupMembers request type. */ + +export interface MsgUpdateGroupMembers { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; + /** + * member_updates is the list of members to update, + * set weight to 0 to remove a member. + */ + + memberUpdates: Member[]; +} +/** MsgUpdateGroupMembers is the Msg/UpdateGroupMembers request type. */ + +export interface MsgUpdateGroupMembersSDKType { + admin: string; + group_id: Long; + member_updates: MemberSDKType[]; +} +/** MsgUpdateGroupMembersResponse is the Msg/UpdateGroupMembers response type. */ + +export interface MsgUpdateGroupMembersResponse {} +/** MsgUpdateGroupMembersResponse is the Msg/UpdateGroupMembers response type. */ + +export interface MsgUpdateGroupMembersResponseSDKType {} +/** MsgUpdateGroupAdmin is the Msg/UpdateGroupAdmin request type. */ + +export interface MsgUpdateGroupAdmin { + /** admin is the current account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; + /** new_admin is the group new admin account address. */ + + newAdmin: string; +} +/** MsgUpdateGroupAdmin is the Msg/UpdateGroupAdmin request type. */ + +export interface MsgUpdateGroupAdminSDKType { + admin: string; + group_id: Long; + new_admin: string; +} +/** MsgUpdateGroupAdminResponse is the Msg/UpdateGroupAdmin response type. */ + +export interface MsgUpdateGroupAdminResponse {} +/** MsgUpdateGroupAdminResponse is the Msg/UpdateGroupAdmin response type. */ + +export interface MsgUpdateGroupAdminResponseSDKType {} +/** MsgUpdateGroupMetadata is the Msg/UpdateGroupMetadata request type. */ + +export interface MsgUpdateGroupMetadata { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; + /** metadata is the updated group's metadata. */ + + metadata: string; +} +/** MsgUpdateGroupMetadata is the Msg/UpdateGroupMetadata request type. */ + +export interface MsgUpdateGroupMetadataSDKType { + admin: string; + group_id: Long; + metadata: string; +} +/** MsgUpdateGroupMetadataResponse is the Msg/UpdateGroupMetadata response type. */ + +export interface MsgUpdateGroupMetadataResponse {} +/** MsgUpdateGroupMetadataResponse is the Msg/UpdateGroupMetadata response type. */ + +export interface MsgUpdateGroupMetadataResponseSDKType {} +/** MsgCreateGroupPolicy is the Msg/CreateGroupPolicy request type. */ + +export interface MsgCreateGroupPolicy { + /** admin is the account address of the group admin. */ + admin: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; + /** metadata is any arbitrary metadata attached to the group policy. 
*/ + + metadata: string; + /** decision_policy specifies the group policy's decision policy. */ + + decisionPolicy?: Any; +} +/** MsgCreateGroupPolicy is the Msg/CreateGroupPolicy request type. */ + +export interface MsgCreateGroupPolicySDKType { + admin: string; + group_id: Long; + metadata: string; + decision_policy?: AnySDKType; +} +/** MsgCreateGroupPolicyResponse is the Msg/CreateGroupPolicy response type. */ + +export interface MsgCreateGroupPolicyResponse { + /** address is the account address of the newly created group policy. */ + address: string; +} +/** MsgCreateGroupPolicyResponse is the Msg/CreateGroupPolicy response type. */ + +export interface MsgCreateGroupPolicyResponseSDKType { + address: string; +} +/** MsgUpdateGroupPolicyAdmin is the Msg/UpdateGroupPolicyAdmin request type. */ + +export interface MsgUpdateGroupPolicyAdmin { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of the group policy. */ + + address: string; + /** new_admin is the new group policy admin. */ + + newAdmin: string; +} +/** MsgUpdateGroupPolicyAdmin is the Msg/UpdateGroupPolicyAdmin request type. */ + +export interface MsgUpdateGroupPolicyAdminSDKType { + admin: string; + address: string; + new_admin: string; +} +/** MsgCreateGroupWithPolicy is the Msg/CreateGroupWithPolicy request type. */ + +export interface MsgCreateGroupWithPolicy { + /** admin is the account address of the group and group policy admin. */ + admin: string; + /** members defines the group members. */ + + members: Member[]; + /** group_metadata is any arbitrary metadata attached to the group. */ + + groupMetadata: string; + /** group_policy_metadata is any arbitrary metadata attached to the group policy. */ + + groupPolicyMetadata: string; + /** group_policy_as_admin is a boolean field, if set to true, the group policy account address will be used as group and group policy admin. */ + + groupPolicyAsAdmin: boolean; + /** decision_policy specifies the group policy's decision policy. */ + + decisionPolicy?: Any; +} +/** MsgCreateGroupWithPolicy is the Msg/CreateGroupWithPolicy request type. */ + +export interface MsgCreateGroupWithPolicySDKType { + admin: string; + members: MemberSDKType[]; + group_metadata: string; + group_policy_metadata: string; + group_policy_as_admin: boolean; + decision_policy?: AnySDKType; +} +/** MsgCreateGroupWithPolicyResponse is the Msg/CreateGroupWithPolicy response type. */ + +export interface MsgCreateGroupWithPolicyResponse { + /** group_id is the unique ID of the newly created group with policy. */ + groupId: Long; + /** group_policy_address is the account address of the newly created group policy. */ + + groupPolicyAddress: string; +} +/** MsgCreateGroupWithPolicyResponse is the Msg/CreateGroupWithPolicy response type. */ + +export interface MsgCreateGroupWithPolicyResponseSDKType { + group_id: Long; + group_policy_address: string; +} +/** MsgUpdateGroupPolicyAdminResponse is the Msg/UpdateGroupPolicyAdmin response type. */ + +export interface MsgUpdateGroupPolicyAdminResponse {} +/** MsgUpdateGroupPolicyAdminResponse is the Msg/UpdateGroupPolicyAdmin response type. */ + +export interface MsgUpdateGroupPolicyAdminResponseSDKType {} +/** MsgUpdateGroupPolicyDecisionPolicy is the Msg/UpdateGroupPolicyDecisionPolicy request type. */ + +export interface MsgUpdateGroupPolicyDecisionPolicy { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of group policy. 
*/ + + address: string; + /** decision_policy is the updated group policy's decision policy. */ + + decisionPolicy?: Any; +} +/** MsgUpdateGroupPolicyDecisionPolicy is the Msg/UpdateGroupPolicyDecisionPolicy request type. */ + +export interface MsgUpdateGroupPolicyDecisionPolicySDKType { + admin: string; + address: string; + decision_policy?: AnySDKType; +} +/** MsgUpdateGroupPolicyDecisionPolicyResponse is the Msg/UpdateGroupPolicyDecisionPolicy response type. */ + +export interface MsgUpdateGroupPolicyDecisionPolicyResponse {} +/** MsgUpdateGroupPolicyDecisionPolicyResponse is the Msg/UpdateGroupPolicyDecisionPolicy response type. */ + +export interface MsgUpdateGroupPolicyDecisionPolicyResponseSDKType {} +/** MsgUpdateGroupPolicyMetadata is the Msg/UpdateGroupPolicyMetadata request type. */ + +export interface MsgUpdateGroupPolicyMetadata { + /** admin is the account address of the group admin. */ + admin: string; + /** address is the account address of group policy. */ + + address: string; + /** metadata is the updated group policy metadata. */ + + metadata: string; +} +/** MsgUpdateGroupPolicyMetadata is the Msg/UpdateGroupPolicyMetadata request type. */ + +export interface MsgUpdateGroupPolicyMetadataSDKType { + admin: string; + address: string; + metadata: string; +} +/** MsgUpdateGroupPolicyMetadataResponse is the Msg/UpdateGroupPolicyMetadata response type. */ + +export interface MsgUpdateGroupPolicyMetadataResponse {} +/** MsgUpdateGroupPolicyMetadataResponse is the Msg/UpdateGroupPolicyMetadata response type. */ + +export interface MsgUpdateGroupPolicyMetadataResponseSDKType {} +/** MsgSubmitProposal is the Msg/SubmitProposal request type. */ + +export interface MsgSubmitProposal { + /** address is the account address of group policy. */ + address: string; + /** + * proposers are the account addresses of the proposers. + * Proposers signatures will be counted as yes votes. + */ + + proposers: string[]; + /** metadata is any arbitrary metadata to attached to the proposal. */ + + metadata: string; + /** messages is a list of `sdk.Msg`s that will be executed if the proposal passes. */ + + messages: Any[]; + /** + * exec defines the mode of execution of the proposal, + * whether it should be executed immediately on creation or not. + * If so, proposers signatures are considered as Yes votes. + */ + + exec: Exec; +} +/** MsgSubmitProposal is the Msg/SubmitProposal request type. */ + +export interface MsgSubmitProposalSDKType { + address: string; + proposers: string[]; + metadata: string; + messages: AnySDKType[]; + exec: Exec; +} +/** MsgSubmitProposalResponse is the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponse { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; +} +/** MsgSubmitProposalResponse is the Msg/SubmitProposal response type. */ + +export interface MsgSubmitProposalResponseSDKType { + proposal_id: Long; +} +/** MsgWithdrawProposal is the Msg/WithdrawProposal request type. */ + +export interface MsgWithdrawProposal { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** address is the admin of the group policy or one of the proposer of the proposal. */ + + address: string; +} +/** MsgWithdrawProposal is the Msg/WithdrawProposal request type. */ + +export interface MsgWithdrawProposalSDKType { + proposal_id: Long; + address: string; +} +/** MsgWithdrawProposalResponse is the Msg/WithdrawProposal response type. 
*/ + +export interface MsgWithdrawProposalResponse {} +/** MsgWithdrawProposalResponse is the Msg/WithdrawProposal response type. */ + +export interface MsgWithdrawProposalResponseSDKType {} +/** MsgVote is the Msg/Vote request type. */ + +export interface MsgVote { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** voter is the voter account address. */ + + voter: string; + /** option is the voter's choice on the proposal. */ + + option: VoteOption; + /** metadata is any arbitrary metadata to attached to the vote. */ + + metadata: string; + /** + * exec defines whether the proposal should be executed + * immediately after voting or not. + */ + + exec: Exec; +} +/** MsgVote is the Msg/Vote request type. */ + +export interface MsgVoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; + exec: Exec; +} +/** MsgVoteResponse is the Msg/Vote response type. */ + +export interface MsgVoteResponse {} +/** MsgVoteResponse is the Msg/Vote response type. */ + +export interface MsgVoteResponseSDKType {} +/** MsgExec is the Msg/Exec request type. */ + +export interface MsgExec { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** signer is the account address used to execute the proposal. */ + + signer: string; +} +/** MsgExec is the Msg/Exec request type. */ + +export interface MsgExecSDKType { + proposal_id: Long; + signer: string; +} +/** MsgExecResponse is the Msg/Exec request type. */ + +export interface MsgExecResponse {} +/** MsgExecResponse is the Msg/Exec request type. */ + +export interface MsgExecResponseSDKType {} +/** MsgLeaveGroup is the Msg/LeaveGroup request type. */ + +export interface MsgLeaveGroup { + /** address is the account address of the group member. */ + address: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; +} +/** MsgLeaveGroup is the Msg/LeaveGroup request type. */ + +export interface MsgLeaveGroupSDKType { + address: string; + group_id: Long; +} +/** MsgLeaveGroupResponse is the Msg/LeaveGroup response type. */ + +export interface MsgLeaveGroupResponse {} +/** MsgLeaveGroupResponse is the Msg/LeaveGroup response type. */ + +export interface MsgLeaveGroupResponseSDKType {} + +function createBaseMsgCreateGroup(): MsgCreateGroup { + return { + admin: "", + members: [], + metadata: "" + }; +} + +export const MsgCreateGroup = { + encode(message: MsgCreateGroup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + for (const v of message.members) { + Member.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroup { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateGroup(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.members.push(Member.decode(reader, reader.uint32())); + break; + + case 3: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroup { + const message = createBaseMsgCreateGroup(); + message.admin = object.admin ?? 
""; + message.members = object.members?.map(e => Member.fromPartial(e)) || []; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseMsgCreateGroupResponse(): MsgCreateGroupResponse { + return { + groupId: Long.UZERO + }; +} + +export const MsgCreateGroupResponse = { + encode(message: MsgCreateGroupResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateGroupResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroupResponse { + const message = createBaseMsgCreateGroupResponse(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgUpdateGroupMembers(): MsgUpdateGroupMembers { + return { + admin: "", + groupId: Long.UZERO, + memberUpdates: [] + }; +} + +export const MsgUpdateGroupMembers = { + encode(message: MsgUpdateGroupMembers, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + for (const v of message.memberUpdates) { + Member.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMembers { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupMembers(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + case 3: + message.memberUpdates.push(Member.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupMembers { + const message = createBaseMsgUpdateGroupMembers(); + message.admin = object.admin ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.memberUpdates = object.memberUpdates?.map(e => Member.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgUpdateGroupMembersResponse(): MsgUpdateGroupMembersResponse { + return {}; +} + +export const MsgUpdateGroupMembersResponse = { + encode(_: MsgUpdateGroupMembersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMembersResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupMembersResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupMembersResponse { + const message = createBaseMsgUpdateGroupMembersResponse(); + return message; + } + +}; + +function createBaseMsgUpdateGroupAdmin(): MsgUpdateGroupAdmin { + return { + admin: "", + groupId: Long.UZERO, + newAdmin: "" + }; +} + +export const MsgUpdateGroupAdmin = { + encode(message: MsgUpdateGroupAdmin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + if (message.newAdmin !== "") { + writer.uint32(26).string(message.newAdmin); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupAdmin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupAdmin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + case 3: + message.newAdmin = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupAdmin { + const message = createBaseMsgUpdateGroupAdmin(); + message.admin = object.admin ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.newAdmin = object.newAdmin ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateGroupAdminResponse(): MsgUpdateGroupAdminResponse { + return {}; +} + +export const MsgUpdateGroupAdminResponse = { + encode(_: MsgUpdateGroupAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupAdminResponse { + const message = createBaseMsgUpdateGroupAdminResponse(); + return message; + } + +}; + +function createBaseMsgUpdateGroupMetadata(): MsgUpdateGroupMetadata { + return { + admin: "", + groupId: Long.UZERO, + metadata: "" + }; +} + +export const MsgUpdateGroupMetadata = { + encode(message: MsgUpdateGroupMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMetadata { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + case 3: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupMetadata { + const message = createBaseMsgUpdateGroupMetadata(); + message.admin = object.admin ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateGroupMetadataResponse(): MsgUpdateGroupMetadataResponse { + return {}; +} + +export const MsgUpdateGroupMetadataResponse = { + encode(_: MsgUpdateGroupMetadataResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupMetadataResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupMetadataResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupMetadataResponse { + const message = createBaseMsgUpdateGroupMetadataResponse(); + return message; + } + +}; + +function createBaseMsgCreateGroupPolicy(): MsgCreateGroupPolicy { + return { + admin: "", + groupId: Long.UZERO, + metadata: "", + decisionPolicy: undefined + }; +} + +export const MsgCreateGroupPolicy = { + encode(message: MsgCreateGroupPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + if (message.decisionPolicy !== undefined) { + Any.encode(message.decisionPolicy, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateGroupPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + case 3: + message.metadata = reader.string(); + break; + + case 4: + message.decisionPolicy = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroupPolicy { + const message = createBaseMsgCreateGroupPolicy(); + message.admin = object.admin ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.metadata = object.metadata ?? 
""; + message.decisionPolicy = object.decisionPolicy !== undefined && object.decisionPolicy !== null ? Any.fromPartial(object.decisionPolicy) : undefined; + return message; + } + +}; + +function createBaseMsgCreateGroupPolicyResponse(): MsgCreateGroupPolicyResponse { + return { + address: "" + }; +} + +export const MsgCreateGroupPolicyResponse = { + encode(message: MsgCreateGroupPolicyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupPolicyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateGroupPolicyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroupPolicyResponse { + const message = createBaseMsgCreateGroupPolicyResponse(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyAdmin(): MsgUpdateGroupPolicyAdmin { + return { + admin: "", + address: "", + newAdmin: "" + }; +} + +export const MsgUpdateGroupPolicyAdmin = { + encode(message: MsgUpdateGroupPolicyAdmin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + if (message.newAdmin !== "") { + writer.uint32(26).string(message.newAdmin); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyAdmin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyAdmin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.address = reader.string(); + break; + + case 3: + message.newAdmin = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyAdmin { + const message = createBaseMsgUpdateGroupPolicyAdmin(); + message.admin = object.admin ?? ""; + message.address = object.address ?? ""; + message.newAdmin = object.newAdmin ?? 
""; + return message; + } + +}; + +function createBaseMsgCreateGroupWithPolicy(): MsgCreateGroupWithPolicy { + return { + admin: "", + members: [], + groupMetadata: "", + groupPolicyMetadata: "", + groupPolicyAsAdmin: false, + decisionPolicy: undefined + }; +} + +export const MsgCreateGroupWithPolicy = { + encode(message: MsgCreateGroupWithPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + for (const v of message.members) { + Member.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.groupMetadata !== "") { + writer.uint32(26).string(message.groupMetadata); + } + + if (message.groupPolicyMetadata !== "") { + writer.uint32(34).string(message.groupPolicyMetadata); + } + + if (message.groupPolicyAsAdmin === true) { + writer.uint32(40).bool(message.groupPolicyAsAdmin); + } + + if (message.decisionPolicy !== undefined) { + Any.encode(message.decisionPolicy, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupWithPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateGroupWithPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.members.push(Member.decode(reader, reader.uint32())); + break; + + case 3: + message.groupMetadata = reader.string(); + break; + + case 4: + message.groupPolicyMetadata = reader.string(); + break; + + case 5: + message.groupPolicyAsAdmin = reader.bool(); + break; + + case 6: + message.decisionPolicy = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroupWithPolicy { + const message = createBaseMsgCreateGroupWithPolicy(); + message.admin = object.admin ?? ""; + message.members = object.members?.map(e => Member.fromPartial(e)) || []; + message.groupMetadata = object.groupMetadata ?? ""; + message.groupPolicyMetadata = object.groupPolicyMetadata ?? ""; + message.groupPolicyAsAdmin = object.groupPolicyAsAdmin ?? false; + message.decisionPolicy = object.decisionPolicy !== undefined && object.decisionPolicy !== null ? Any.fromPartial(object.decisionPolicy) : undefined; + return message; + } + +}; + +function createBaseMsgCreateGroupWithPolicyResponse(): MsgCreateGroupWithPolicyResponse { + return { + groupId: Long.UZERO, + groupPolicyAddress: "" + }; +} + +export const MsgCreateGroupWithPolicyResponse = { + encode(message: MsgCreateGroupWithPolicyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + if (message.groupPolicyAddress !== "") { + writer.uint32(18).string(message.groupPolicyAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateGroupWithPolicyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateGroupWithPolicyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + case 2: + message.groupPolicyAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateGroupWithPolicyResponse { + const message = createBaseMsgCreateGroupWithPolicyResponse(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.groupPolicyAddress = object.groupPolicyAddress ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyAdminResponse(): MsgUpdateGroupPolicyAdminResponse { + return {}; +} + +export const MsgUpdateGroupPolicyAdminResponse = { + encode(_: MsgUpdateGroupPolicyAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyAdminResponse { + const message = createBaseMsgUpdateGroupPolicyAdminResponse(); + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyDecisionPolicy(): MsgUpdateGroupPolicyDecisionPolicy { + return { + admin: "", + address: "", + decisionPolicy: undefined + }; +} + +export const MsgUpdateGroupPolicyDecisionPolicy = { + encode(message: MsgUpdateGroupPolicyDecisionPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + if (message.decisionPolicy !== undefined) { + Any.encode(message.decisionPolicy, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyDecisionPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyDecisionPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.address = reader.string(); + break; + + case 3: + message.decisionPolicy = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyDecisionPolicy { + const message = createBaseMsgUpdateGroupPolicyDecisionPolicy(); + message.admin = object.admin ?? ""; + message.address = object.address ?? ""; + message.decisionPolicy = object.decisionPolicy !== undefined && object.decisionPolicy !== null ? 
Any.fromPartial(object.decisionPolicy) : undefined; + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyDecisionPolicyResponse(): MsgUpdateGroupPolicyDecisionPolicyResponse { + return {}; +} + +export const MsgUpdateGroupPolicyDecisionPolicyResponse = { + encode(_: MsgUpdateGroupPolicyDecisionPolicyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyDecisionPolicyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyDecisionPolicyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyDecisionPolicyResponse { + const message = createBaseMsgUpdateGroupPolicyDecisionPolicyResponse(); + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyMetadata(): MsgUpdateGroupPolicyMetadata { + return { + admin: "", + address: "", + metadata: "" + }; +} + +export const MsgUpdateGroupPolicyMetadata = { + encode(message: MsgUpdateGroupPolicyMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.admin !== "") { + writer.uint32(10).string(message.admin); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyMetadata { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.admin = reader.string(); + break; + + case 2: + message.address = reader.string(); + break; + + case 3: + message.metadata = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateGroupPolicyMetadata { + const message = createBaseMsgUpdateGroupPolicyMetadata(); + message.admin = object.admin ?? ""; + message.address = object.address ?? ""; + message.metadata = object.metadata ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateGroupPolicyMetadataResponse(): MsgUpdateGroupPolicyMetadataResponse { + return {}; +} + +export const MsgUpdateGroupPolicyMetadataResponse = { + encode(_: MsgUpdateGroupPolicyMetadataResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateGroupPolicyMetadataResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateGroupPolicyMetadataResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateGroupPolicyMetadataResponse { + const message = createBaseMsgUpdateGroupPolicyMetadataResponse(); + return message; + } + +}; + +function createBaseMsgSubmitProposal(): MsgSubmitProposal { + return { + address: "", + proposers: [], + metadata: "", + messages: [], + exec: 0 + }; +} + +export const MsgSubmitProposal = { + encode(message: MsgSubmitProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.proposers) { + writer.uint32(18).string(v!); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + for (const v of message.messages) { + Any.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.exec !== 0) { + writer.uint32(40).int32(message.exec); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.proposers.push(reader.string()); + break; + + case 3: + message.metadata = reader.string(); + break; + + case 4: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + + case 5: + message.exec = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposal { + const message = createBaseMsgSubmitProposal(); + message.address = object.address ?? ""; + message.proposers = object.proposers?.map(e => e) || []; + message.metadata = object.metadata ?? ""; + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + message.exec = object.exec ?? 0; + return message; + } + +}; + +function createBaseMsgSubmitProposalResponse(): MsgSubmitProposalResponse { + return { + proposalId: Long.UZERO + }; +} + +export const MsgSubmitProposalResponse = { + encode(message: MsgSubmitProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitProposalResponse { + const message = createBaseMsgSubmitProposalResponse(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? 
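+    // Hypothetical sketch for MsgSubmitProposal above (placeholder values): the group
+    // policy account goes in `address`, the wrapped sdk.Msgs in `messages`, and `exec`
+    // is an int32 enum value (1 is assumed to correspond to EXEC_TRY).
+    //
+    //   const proposal = MsgSubmitProposal.fromPartial({
+    //     address: "cosmos1policy...",
+    //     proposers: ["cosmos1member..."],
+    //     metadata: "ipfs://...",
+    //     messages: [],        // Any-encoded messages to run if the proposal passes
+    //     exec: 1
+    //   });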
Long.fromValue(object.proposalId) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgWithdrawProposal(): MsgWithdrawProposal { + return { + proposalId: Long.UZERO, + address: "" + }; +} + +export const MsgWithdrawProposal = { + encode(message: MsgWithdrawProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWithdrawProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgWithdrawProposal { + const message = createBaseMsgWithdrawProposal(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseMsgWithdrawProposalResponse(): MsgWithdrawProposalResponse { + return {}; +} + +export const MsgWithdrawProposalResponse = { + encode(_: MsgWithdrawProposalResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgWithdrawProposalResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWithdrawProposalResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgWithdrawProposalResponse { + const message = createBaseMsgWithdrawProposalResponse(); + return message; + } + +}; + +function createBaseMsgVote(): MsgVote { + return { + proposalId: Long.UZERO, + voter: "", + option: 0, + metadata: "", + exec: 0 + }; +} + +export const MsgVote = { + encode(message: MsgVote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.option !== 0) { + writer.uint32(24).int32(message.option); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + if (message.exec !== 0) { + writer.uint32(40).int32(message.exec); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
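+    // Hypothetical MsgVote sketch (placeholder values): `option` takes a VoteOption
+    // value from ./types (VOTE_OPTION_YES = 1), and `exec` the same kind of Exec value
+    // as in MsgSubmitProposal (0 is assumed to be unspecified).
+    //
+    //   const vote = MsgVote.fromPartial({
+    //     proposalId: Long.fromNumber(7),
+    //     voter: "cosmos1member...",
+    //     option: 1,            // VOTE_OPTION_YES
+    //     metadata: "",
+    //     exec: 0
+    //   });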
reader.len : reader.pos + length; + const message = createBaseMsgVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.option = (reader.int32() as any); + break; + + case 4: + message.metadata = reader.string(); + break; + + case 5: + message.exec = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgVote { + const message = createBaseMsgVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.option = object.option ?? 0; + message.metadata = object.metadata ?? ""; + message.exec = object.exec ?? 0; + return message; + } + +}; + +function createBaseMsgVoteResponse(): MsgVoteResponse { + return {}; +} + +export const MsgVoteResponse = { + encode(_: MsgVoteResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgVoteResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgVoteResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgVoteResponse { + const message = createBaseMsgVoteResponse(); + return message; + } + +}; + +function createBaseMsgExec(): MsgExec { + return { + proposalId: Long.UZERO, + signer: "" + }; +} + +export const MsgExec = { + encode(message: MsgExec, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.signer !== "") { + writer.uint32(18).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExec { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExec(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExec { + const message = createBaseMsgExec(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgExecResponse(): MsgExecResponse { + return {}; +} + +export const MsgExecResponse = { + encode(_: MsgExecResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgExecResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgExecResponse { + const message = createBaseMsgExecResponse(); + return message; + } + +}; + +function createBaseMsgLeaveGroup(): MsgLeaveGroup { + return { + address: "", + groupId: Long.UZERO + }; +} + +export const MsgLeaveGroup = { + encode(message: MsgLeaveGroup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgLeaveGroup { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgLeaveGroup(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgLeaveGroup { + const message = createBaseMsgLeaveGroup(); + message.address = object.address ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgLeaveGroupResponse(): MsgLeaveGroupResponse { + return {}; +} + +export const MsgLeaveGroupResponse = { + encode(_: MsgLeaveGroupResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgLeaveGroupResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgLeaveGroupResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgLeaveGroupResponse { + const message = createBaseMsgLeaveGroupResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/group/v1/types.ts b/packages/codegen/src/cosmos/group/v1/types.ts new file mode 100644 index 00000000..d9cb4a2a --- /dev/null +++ b/packages/codegen/src/cosmos/group/v1/types.ts @@ -0,0 +1,1461 @@ +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { toTimestamp, fromTimestamp, DeepPartial, Long } from "../../../helpers"; +/** VoteOption enumerates the valid vote options for a given proposal. */ + +export enum VoteOption { + /** VOTE_OPTION_UNSPECIFIED - VOTE_OPTION_UNSPECIFIED defines a no-op vote option. */ + VOTE_OPTION_UNSPECIFIED = 0, + + /** VOTE_OPTION_YES - VOTE_OPTION_YES defines a yes vote option. */ + VOTE_OPTION_YES = 1, + + /** VOTE_OPTION_ABSTAIN - VOTE_OPTION_ABSTAIN defines an abstain vote option. 
*/ + VOTE_OPTION_ABSTAIN = 2, + + /** VOTE_OPTION_NO - VOTE_OPTION_NO defines a no vote option. */ + VOTE_OPTION_NO = 3, + + /** VOTE_OPTION_NO_WITH_VETO - VOTE_OPTION_NO_WITH_VETO defines a no with veto vote option. */ + VOTE_OPTION_NO_WITH_VETO = 4, + UNRECOGNIZED = -1, +} +export const VoteOptionSDKType = VoteOption; +export function voteOptionFromJSON(object: any): VoteOption { + switch (object) { + case 0: + case "VOTE_OPTION_UNSPECIFIED": + return VoteOption.VOTE_OPTION_UNSPECIFIED; + + case 1: + case "VOTE_OPTION_YES": + return VoteOption.VOTE_OPTION_YES; + + case 2: + case "VOTE_OPTION_ABSTAIN": + return VoteOption.VOTE_OPTION_ABSTAIN; + + case 3: + case "VOTE_OPTION_NO": + return VoteOption.VOTE_OPTION_NO; + + case 4: + case "VOTE_OPTION_NO_WITH_VETO": + return VoteOption.VOTE_OPTION_NO_WITH_VETO; + + case -1: + case "UNRECOGNIZED": + default: + return VoteOption.UNRECOGNIZED; + } +} +export function voteOptionToJSON(object: VoteOption): string { + switch (object) { + case VoteOption.VOTE_OPTION_UNSPECIFIED: + return "VOTE_OPTION_UNSPECIFIED"; + + case VoteOption.VOTE_OPTION_YES: + return "VOTE_OPTION_YES"; + + case VoteOption.VOTE_OPTION_ABSTAIN: + return "VOTE_OPTION_ABSTAIN"; + + case VoteOption.VOTE_OPTION_NO: + return "VOTE_OPTION_NO"; + + case VoteOption.VOTE_OPTION_NO_WITH_VETO: + return "VOTE_OPTION_NO_WITH_VETO"; + + case VoteOption.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ProposalStatus defines proposal statuses. */ + +export enum ProposalStatus { + /** PROPOSAL_STATUS_UNSPECIFIED - An empty value is invalid and not allowed. */ + PROPOSAL_STATUS_UNSPECIFIED = 0, + + /** PROPOSAL_STATUS_SUBMITTED - Initial status of a proposal when persisted. */ + PROPOSAL_STATUS_SUBMITTED = 1, + + /** PROPOSAL_STATUS_CLOSED - Final status of a proposal when the final tally was executed. */ + PROPOSAL_STATUS_CLOSED = 2, + + /** PROPOSAL_STATUS_ABORTED - Final status of a proposal when the group was modified before the final tally. */ + PROPOSAL_STATUS_ABORTED = 3, + + /** + * PROPOSAL_STATUS_WITHDRAWN - A proposal can be deleted before the voting start time by the owner. When this happens the final status + * is Withdrawn. 
+ */ + PROPOSAL_STATUS_WITHDRAWN = 4, + UNRECOGNIZED = -1, +} +export const ProposalStatusSDKType = ProposalStatus; +export function proposalStatusFromJSON(object: any): ProposalStatus { + switch (object) { + case 0: + case "PROPOSAL_STATUS_UNSPECIFIED": + return ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED; + + case 1: + case "PROPOSAL_STATUS_SUBMITTED": + return ProposalStatus.PROPOSAL_STATUS_SUBMITTED; + + case 2: + case "PROPOSAL_STATUS_CLOSED": + return ProposalStatus.PROPOSAL_STATUS_CLOSED; + + case 3: + case "PROPOSAL_STATUS_ABORTED": + return ProposalStatus.PROPOSAL_STATUS_ABORTED; + + case 4: + case "PROPOSAL_STATUS_WITHDRAWN": + return ProposalStatus.PROPOSAL_STATUS_WITHDRAWN; + + case -1: + case "UNRECOGNIZED": + default: + return ProposalStatus.UNRECOGNIZED; + } +} +export function proposalStatusToJSON(object: ProposalStatus): string { + switch (object) { + case ProposalStatus.PROPOSAL_STATUS_UNSPECIFIED: + return "PROPOSAL_STATUS_UNSPECIFIED"; + + case ProposalStatus.PROPOSAL_STATUS_SUBMITTED: + return "PROPOSAL_STATUS_SUBMITTED"; + + case ProposalStatus.PROPOSAL_STATUS_CLOSED: + return "PROPOSAL_STATUS_CLOSED"; + + case ProposalStatus.PROPOSAL_STATUS_ABORTED: + return "PROPOSAL_STATUS_ABORTED"; + + case ProposalStatus.PROPOSAL_STATUS_WITHDRAWN: + return "PROPOSAL_STATUS_WITHDRAWN"; + + case ProposalStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ProposalResult defines types of proposal results. */ + +export enum ProposalResult { + /** PROPOSAL_RESULT_UNSPECIFIED - An empty value is invalid and not allowed */ + PROPOSAL_RESULT_UNSPECIFIED = 0, + + /** PROPOSAL_RESULT_UNFINALIZED - Until a final tally has happened the status is unfinalized */ + PROPOSAL_RESULT_UNFINALIZED = 1, + + /** PROPOSAL_RESULT_ACCEPTED - Final result of the tally */ + PROPOSAL_RESULT_ACCEPTED = 2, + + /** PROPOSAL_RESULT_REJECTED - Final result of the tally */ + PROPOSAL_RESULT_REJECTED = 3, + UNRECOGNIZED = -1, +} +export const ProposalResultSDKType = ProposalResult; +export function proposalResultFromJSON(object: any): ProposalResult { + switch (object) { + case 0: + case "PROPOSAL_RESULT_UNSPECIFIED": + return ProposalResult.PROPOSAL_RESULT_UNSPECIFIED; + + case 1: + case "PROPOSAL_RESULT_UNFINALIZED": + return ProposalResult.PROPOSAL_RESULT_UNFINALIZED; + + case 2: + case "PROPOSAL_RESULT_ACCEPTED": + return ProposalResult.PROPOSAL_RESULT_ACCEPTED; + + case 3: + case "PROPOSAL_RESULT_REJECTED": + return ProposalResult.PROPOSAL_RESULT_REJECTED; + + case -1: + case "UNRECOGNIZED": + default: + return ProposalResult.UNRECOGNIZED; + } +} +export function proposalResultToJSON(object: ProposalResult): string { + switch (object) { + case ProposalResult.PROPOSAL_RESULT_UNSPECIFIED: + return "PROPOSAL_RESULT_UNSPECIFIED"; + + case ProposalResult.PROPOSAL_RESULT_UNFINALIZED: + return "PROPOSAL_RESULT_UNFINALIZED"; + + case ProposalResult.PROPOSAL_RESULT_ACCEPTED: + return "PROPOSAL_RESULT_ACCEPTED"; + + case ProposalResult.PROPOSAL_RESULT_REJECTED: + return "PROPOSAL_RESULT_REJECTED"; + + case ProposalResult.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ProposalExecutorResult defines types of proposal executor results. */ + +export enum ProposalExecutorResult { + /** PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED - An empty value is not allowed. */ + PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED = 0, + + /** PROPOSAL_EXECUTOR_RESULT_NOT_RUN - We have not yet run the executor. 
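+ *
+ * As with the other enums in this file, the generated helpers below map between the
+ * numeric and string forms, for example:
+ *
+ *   proposalExecutorResultFromJSON("PROPOSAL_EXECUTOR_RESULT_NOT_RUN")
+ *     === ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_NOT_RUN   // numeric value 1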
*/ + PROPOSAL_EXECUTOR_RESULT_NOT_RUN = 1, + + /** PROPOSAL_EXECUTOR_RESULT_SUCCESS - The executor was successful and proposed action updated state. */ + PROPOSAL_EXECUTOR_RESULT_SUCCESS = 2, + + /** PROPOSAL_EXECUTOR_RESULT_FAILURE - The executor returned an error and proposed action didn't update state. */ + PROPOSAL_EXECUTOR_RESULT_FAILURE = 3, + UNRECOGNIZED = -1, +} +export const ProposalExecutorResultSDKType = ProposalExecutorResult; +export function proposalExecutorResultFromJSON(object: any): ProposalExecutorResult { + switch (object) { + case 0: + case "PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED": + return ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED; + + case 1: + case "PROPOSAL_EXECUTOR_RESULT_NOT_RUN": + return ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_NOT_RUN; + + case 2: + case "PROPOSAL_EXECUTOR_RESULT_SUCCESS": + return ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_SUCCESS; + + case 3: + case "PROPOSAL_EXECUTOR_RESULT_FAILURE": + return ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_FAILURE; + + case -1: + case "UNRECOGNIZED": + default: + return ProposalExecutorResult.UNRECOGNIZED; + } +} +export function proposalExecutorResultToJSON(object: ProposalExecutorResult): string { + switch (object) { + case ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED: + return "PROPOSAL_EXECUTOR_RESULT_UNSPECIFIED"; + + case ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_NOT_RUN: + return "PROPOSAL_EXECUTOR_RESULT_NOT_RUN"; + + case ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_SUCCESS: + return "PROPOSAL_EXECUTOR_RESULT_SUCCESS"; + + case ProposalExecutorResult.PROPOSAL_EXECUTOR_RESULT_FAILURE: + return "PROPOSAL_EXECUTOR_RESULT_FAILURE"; + + case ProposalExecutorResult.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * Member represents a group member with an account address, + * non-zero weight and metadata. + */ + +export interface Member { + /** address is the member's account address. */ + address: string; + /** weight is the member's voting weight that should be greater than 0. */ + + weight: string; + /** metadata is any arbitrary metadata to attached to the member. */ + + metadata: string; + /** added_at is a timestamp specifying when a member was added. */ + + addedAt?: Date; +} +/** + * Member represents a group member with an account address, + * non-zero weight and metadata. + */ + +export interface MemberSDKType { + address: string; + weight: string; + metadata: string; + added_at?: Date; +} +/** Members defines a repeated slice of Member objects. */ + +export interface Members { + /** members is the list of members. */ + members: Member[]; +} +/** Members defines a repeated slice of Member objects. */ + +export interface MembersSDKType { + members: MemberSDKType[]; +} +/** ThresholdDecisionPolicy implements the DecisionPolicy interface */ + +export interface ThresholdDecisionPolicy { + /** threshold is the minimum weighted sum of yes votes that must be met or exceeded for a proposal to succeed. */ + threshold: string; + /** windows defines the different windows for voting and execution. 
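+ *
+ * A hypothetical example (placeholder values), using the DecisionPolicyWindows shape
+ * defined further below:
+ *
+ *   const policy = ThresholdDecisionPolicy.fromPartial({
+ *     threshold: "2",
+ *     windows: {
+ *       votingPeriod: { seconds: Long.fromNumber(172800), nanos: 0 },   // 2 days
+ *       minExecutionPeriod: { seconds: Long.fromNumber(0), nanos: 0 }
+ *     }
+ *   });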
*/ + + windows?: DecisionPolicyWindows; +} +/** ThresholdDecisionPolicy implements the DecisionPolicy interface */ + +export interface ThresholdDecisionPolicySDKType { + threshold: string; + windows?: DecisionPolicyWindowsSDKType; +} +/** PercentageDecisionPolicy implements the DecisionPolicy interface */ + +export interface PercentageDecisionPolicy { + /** percentage is the minimum percentage the weighted sum of yes votes must meet for a proposal to succeed. */ + percentage: string; + /** windows defines the different windows for voting and execution. */ + + windows?: DecisionPolicyWindows; +} +/** PercentageDecisionPolicy implements the DecisionPolicy interface */ + +export interface PercentageDecisionPolicySDKType { + percentage: string; + windows?: DecisionPolicyWindowsSDKType; +} +/** DecisionPolicyWindows defines the different windows for voting and execution. */ + +export interface DecisionPolicyWindows { + /** + * voting_period is the duration from submission of a proposal to the end of voting period + * Within this times votes can be submitted with MsgVote. + */ + votingPeriod?: Duration; + /** + * min_execution_period is the minimum duration after the proposal submission + * where members can start sending MsgExec. This means that the window for + * sending a MsgExec transaction is: + * `[ submission + min_execution_period ; submission + voting_period + max_execution_period]` + * where max_execution_period is a app-specific config, defined in the keeper. + * If not set, min_execution_period will default to 0. + * + * Please make sure to set a `min_execution_period` that is smaller than + * `voting_period + max_execution_period`, or else the above execution window + * is empty, meaning that all proposals created with this decision policy + * won't be able to be executed. + */ + + minExecutionPeriod?: Duration; +} +/** DecisionPolicyWindows defines the different windows for voting and execution. */ + +export interface DecisionPolicyWindowsSDKType { + voting_period?: DurationSDKType; + min_execution_period?: DurationSDKType; +} +/** GroupInfo represents the high-level on-chain information for a group. */ + +export interface GroupInfo { + /** id is the unique ID of the group. */ + id: Long; + /** admin is the account address of the group's admin. */ + + admin: string; + /** metadata is any arbitrary metadata to attached to the group. */ + + metadata: string; + /** + * version is used to track changes to a group's membership structure that + * would break existing proposals. Whenever any members weight is changed, + * or any member is added or removed this version is incremented and will + * cause proposals based on older versions of this group to fail + */ + + version: Long; + /** total_weight is the sum of the group members' weights. */ + + totalWeight: string; + /** created_at is a timestamp specifying when a group was created. */ + + createdAt?: Date; +} +/** GroupInfo represents the high-level on-chain information for a group. */ + +export interface GroupInfoSDKType { + id: Long; + admin: string; + metadata: string; + version: Long; + total_weight: string; + created_at?: Date; +} +/** GroupMember represents the relationship between a group and a member. */ + +export interface GroupMember { + /** group_id is the unique ID of the group. */ + groupId: Long; + /** member is the member data. */ + + member?: Member; +} +/** GroupMember represents the relationship between a group and a member. 
*/ + +export interface GroupMemberSDKType { + group_id: Long; + member?: MemberSDKType; +} +/** GroupPolicyInfo represents the high-level on-chain information for a group policy. */ + +export interface GroupPolicyInfo { + /** address is the account address of group policy. */ + address: string; + /** group_id is the unique ID of the group. */ + + groupId: Long; + /** admin is the account address of the group admin. */ + + admin: string; + /** metadata is any arbitrary metadata to attached to the group policy. */ + + metadata: string; + /** + * version is used to track changes to a group's GroupPolicyInfo structure that + * would create a different result on a running proposal. + */ + + version: Long; + /** decision_policy specifies the group policy's decision policy. */ + + decisionPolicy?: Any; + /** created_at is a timestamp specifying when a group policy was created. */ + + createdAt?: Date; +} +/** GroupPolicyInfo represents the high-level on-chain information for a group policy. */ + +export interface GroupPolicyInfoSDKType { + address: string; + group_id: Long; + admin: string; + metadata: string; + version: Long; + decision_policy?: AnySDKType; + created_at?: Date; +} +/** + * Proposal defines a group proposal. Any member of a group can submit a proposal + * for a group policy to decide upon. + * A proposal consists of a set of `sdk.Msg`s that will be executed if the proposal + * passes as well as some optional metadata associated with the proposal. + */ + +export interface Proposal { + /** id is the unique id of the proposal. */ + id: Long; + /** address is the account address of group policy. */ + + address: string; + /** metadata is any arbitrary metadata to attached to the proposal. */ + + metadata: string; + /** proposers are the account addresses of the proposers. */ + + proposers: string[]; + /** submit_time is a timestamp specifying when a proposal was submitted. */ + + submitTime?: Date; + /** + * group_version tracks the version of the group that this proposal corresponds to. + * When group membership is changed, existing proposals from previous group versions will become invalid. + */ + + groupVersion: Long; + /** + * group_policy_version tracks the version of the group policy that this proposal corresponds to. + * When a decision policy is changed, existing proposals from previous policy versions will become invalid. + */ + + groupPolicyVersion: Long; + /** status represents the high level position in the life cycle of the proposal. Initial value is Submitted. */ + + status: ProposalStatus; + /** + * result is the final result based on the votes and election rule. Initial value is unfinalized. + * The result is persisted so that clients can always rely on this state and not have to replicate the logic. + */ + + result: ProposalResult; + /** + * final_tally_result contains the sums of all weighted votes for this + * proposal for each vote option, after tallying. When querying a proposal + * via gRPC, this field is not populated until the proposal's voting period + * has ended. + */ + + finalTallyResult?: TallyResult; + /** + * voting_period_end is the timestamp before which voting must be done. + * Unless a successfull MsgExec is called before (to execute a proposal whose + * tally is successful before the voting period ends), tallying will be done + * at this point, and the `final_tally_result`, as well + * as `status` and `result` fields will be accordingly updated. 
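+ *
+ * For example (hypothetical values): with voting_period = 172800s (2 days) and a
+ * submit_time of 2023-01-01T00:00:00Z, voting_period_end is 2023-01-03T00:00:00Z.
+ * Per the DecisionPolicyWindows comment above, a MsgExec sent between
+ * submit_time + min_execution_period and voting_period_end + max_execution_period
+ * can execute the proposal.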
+ */ + + votingPeriodEnd?: Date; + /** executor_result is the final result based on the votes and election rule. Initial value is NotRun. */ + + executorResult: ProposalExecutorResult; + /** messages is a list of Msgs that will be executed if the proposal passes. */ + + messages: Any[]; +} +/** + * Proposal defines a group proposal. Any member of a group can submit a proposal + * for a group policy to decide upon. + * A proposal consists of a set of `sdk.Msg`s that will be executed if the proposal + * passes as well as some optional metadata associated with the proposal. + */ + +export interface ProposalSDKType { + id: Long; + address: string; + metadata: string; + proposers: string[]; + submit_time?: Date; + group_version: Long; + group_policy_version: Long; + status: ProposalStatus; + result: ProposalResult; + final_tally_result?: TallyResultSDKType; + voting_period_end?: Date; + executor_result: ProposalExecutorResult; + messages: AnySDKType[]; +} +/** TallyResult represents the sum of weighted votes for each vote option. */ + +export interface TallyResult { + /** yes_count is the weighted sum of yes votes. */ + yesCount: string; + /** abstain_count is the weighted sum of abstainers. */ + + abstainCount: string; + /** no is the weighted sum of no votes. */ + + noCount: string; + /** no_with_veto_count is the weighted sum of veto. */ + + noWithVetoCount: string; +} +/** TallyResult represents the sum of weighted votes for each vote option. */ + +export interface TallyResultSDKType { + yes_count: string; + abstain_count: string; + no_count: string; + no_with_veto_count: string; +} +/** Vote represents a vote for a proposal. */ + +export interface Vote { + /** proposal is the unique ID of the proposal. */ + proposalId: Long; + /** voter is the account address of the voter. */ + + voter: string; + /** option is the voter's choice on the proposal. */ + + option: VoteOption; + /** metadata is any arbitrary metadata to attached to the vote. */ + + metadata: string; + /** submit_time is the timestamp when the vote was submitted. */ + + submitTime?: Date; +} +/** Vote represents a vote for a proposal. */ + +export interface VoteSDKType { + proposal_id: Long; + voter: string; + option: VoteOption; + metadata: string; + submit_time?: Date; +} + +function createBaseMember(): Member { + return { + address: "", + weight: "", + metadata: "", + addedAt: undefined + }; +} + +export const Member = { + encode(message: Member, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.weight !== "") { + writer.uint32(18).string(message.weight); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + if (message.addedAt !== undefined) { + Timestamp.encode(toTimestamp(message.addedAt), writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Member { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
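+    // Hypothetical sketch (placeholder values): addedAt is exposed as a JS Date and is
+    // converted with toTimestamp()/fromTimestamp() from ../../../helpers during
+    // encode/decode, e.g.
+    //
+    //   const member = Member.fromPartial({
+    //     address: "cosmos1...", weight: "1", metadata: "", addedAt: new Date()
+    //   });
+    //   const restored = Member.decode(Member.encode(member).finish());
+    //   // restored.addedAt is again a Date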
reader.len : reader.pos + length; + const message = createBaseMember(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.weight = reader.string(); + break; + + case 3: + message.metadata = reader.string(); + break; + + case 4: + message.addedAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Member { + const message = createBaseMember(); + message.address = object.address ?? ""; + message.weight = object.weight ?? ""; + message.metadata = object.metadata ?? ""; + message.addedAt = object.addedAt ?? undefined; + return message; + } + +}; + +function createBaseMembers(): Members { + return { + members: [] + }; +} + +export const Members = { + encode(message: Members, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.members) { + Member.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Members { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMembers(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.members.push(Member.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Members { + const message = createBaseMembers(); + message.members = object.members?.map(e => Member.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseThresholdDecisionPolicy(): ThresholdDecisionPolicy { + return { + threshold: "", + windows: undefined + }; +} + +export const ThresholdDecisionPolicy = { + encode(message: ThresholdDecisionPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.threshold !== "") { + writer.uint32(10).string(message.threshold); + } + + if (message.windows !== undefined) { + DecisionPolicyWindows.encode(message.windows, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ThresholdDecisionPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseThresholdDecisionPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.threshold = reader.string(); + break; + + case 2: + message.windows = DecisionPolicyWindows.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ThresholdDecisionPolicy { + const message = createBaseThresholdDecisionPolicy(); + message.threshold = object.threshold ?? ""; + message.windows = object.windows !== undefined && object.windows !== null ? 
DecisionPolicyWindows.fromPartial(object.windows) : undefined; + return message; + } + +}; + +function createBasePercentageDecisionPolicy(): PercentageDecisionPolicy { + return { + percentage: "", + windows: undefined + }; +} + +export const PercentageDecisionPolicy = { + encode(message: PercentageDecisionPolicy, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.percentage !== "") { + writer.uint32(10).string(message.percentage); + } + + if (message.windows !== undefined) { + DecisionPolicyWindows.encode(message.windows, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PercentageDecisionPolicy { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePercentageDecisionPolicy(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.percentage = reader.string(); + break; + + case 2: + message.windows = DecisionPolicyWindows.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PercentageDecisionPolicy { + const message = createBasePercentageDecisionPolicy(); + message.percentage = object.percentage ?? ""; + message.windows = object.windows !== undefined && object.windows !== null ? DecisionPolicyWindows.fromPartial(object.windows) : undefined; + return message; + } + +}; + +function createBaseDecisionPolicyWindows(): DecisionPolicyWindows { + return { + votingPeriod: undefined, + minExecutionPeriod: undefined + }; +} + +export const DecisionPolicyWindows = { + encode(message: DecisionPolicyWindows, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.votingPeriod !== undefined) { + Duration.encode(message.votingPeriod, writer.uint32(10).fork()).ldelim(); + } + + if (message.minExecutionPeriod !== undefined) { + Duration.encode(message.minExecutionPeriod, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecisionPolicyWindows { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecisionPolicyWindows(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.votingPeriod = Duration.decode(reader, reader.uint32()); + break; + + case 2: + message.minExecutionPeriod = Duration.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DecisionPolicyWindows { + const message = createBaseDecisionPolicyWindows(); + message.votingPeriod = object.votingPeriod !== undefined && object.votingPeriod !== null ? Duration.fromPartial(object.votingPeriod) : undefined; + message.minExecutionPeriod = object.minExecutionPeriod !== undefined && object.minExecutionPeriod !== null ? 
Duration.fromPartial(object.minExecutionPeriod) : undefined; + return message; + } + +}; + +function createBaseGroupInfo(): GroupInfo { + return { + id: Long.UZERO, + admin: "", + metadata: "", + version: Long.UZERO, + totalWeight: "", + createdAt: undefined + }; +} + +export const GroupInfo = { + encode(message: GroupInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + + if (message.admin !== "") { + writer.uint32(18).string(message.admin); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + if (!message.version.isZero()) { + writer.uint32(32).uint64(message.version); + } + + if (message.totalWeight !== "") { + writer.uint32(42).string(message.totalWeight); + } + + if (message.createdAt !== undefined) { + Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GroupInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGroupInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = (reader.uint64() as Long); + break; + + case 2: + message.admin = reader.string(); + break; + + case 3: + message.metadata = reader.string(); + break; + + case 4: + message.version = (reader.uint64() as Long); + break; + + case 5: + message.totalWeight = reader.string(); + break; + + case 6: + message.createdAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GroupInfo { + const message = createBaseGroupInfo(); + message.id = object.id !== undefined && object.id !== null ? Long.fromValue(object.id) : Long.UZERO; + message.admin = object.admin ?? ""; + message.metadata = object.metadata ?? ""; + message.version = object.version !== undefined && object.version !== null ? Long.fromValue(object.version) : Long.UZERO; + message.totalWeight = object.totalWeight ?? ""; + message.createdAt = object.createdAt ?? undefined; + return message; + } + +}; + +function createBaseGroupMember(): GroupMember { + return { + groupId: Long.UZERO, + member: undefined + }; +} + +export const GroupMember = { + encode(message: GroupMember, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.groupId.isZero()) { + writer.uint32(8).uint64(message.groupId); + } + + if (message.member !== undefined) { + Member.encode(message.member, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GroupMember { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGroupMember(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.groupId = (reader.uint64() as Long); + break; + + case 2: + message.member = Member.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GroupMember { + const message = createBaseGroupMember(); + message.groupId = object.groupId !== undefined && object.groupId !== null ? 
Long.fromValue(object.groupId) : Long.UZERO; + message.member = object.member !== undefined && object.member !== null ? Member.fromPartial(object.member) : undefined; + return message; + } + +}; + +function createBaseGroupPolicyInfo(): GroupPolicyInfo { + return { + address: "", + groupId: Long.UZERO, + admin: "", + metadata: "", + version: Long.UZERO, + decisionPolicy: undefined, + createdAt: undefined + }; +} + +export const GroupPolicyInfo = { + encode(message: GroupPolicyInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (!message.groupId.isZero()) { + writer.uint32(16).uint64(message.groupId); + } + + if (message.admin !== "") { + writer.uint32(26).string(message.admin); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + if (!message.version.isZero()) { + writer.uint32(40).uint64(message.version); + } + + if (message.decisionPolicy !== undefined) { + Any.encode(message.decisionPolicy, writer.uint32(50).fork()).ldelim(); + } + + if (message.createdAt !== undefined) { + Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GroupPolicyInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGroupPolicyInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.groupId = (reader.uint64() as Long); + break; + + case 3: + message.admin = reader.string(); + break; + + case 4: + message.metadata = reader.string(); + break; + + case 5: + message.version = (reader.uint64() as Long); + break; + + case 6: + message.decisionPolicy = Any.decode(reader, reader.uint32()); + break; + + case 7: + message.createdAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GroupPolicyInfo { + const message = createBaseGroupPolicyInfo(); + message.address = object.address ?? ""; + message.groupId = object.groupId !== undefined && object.groupId !== null ? Long.fromValue(object.groupId) : Long.UZERO; + message.admin = object.admin ?? ""; + message.metadata = object.metadata ?? ""; + message.version = object.version !== undefined && object.version !== null ? Long.fromValue(object.version) : Long.UZERO; + message.decisionPolicy = object.decisionPolicy !== undefined && object.decisionPolicy !== null ? Any.fromPartial(object.decisionPolicy) : undefined; + message.createdAt = object.createdAt ?? 
undefined; + return message; + } + +}; + +function createBaseProposal(): Proposal { + return { + id: Long.UZERO, + address: "", + metadata: "", + proposers: [], + submitTime: undefined, + groupVersion: Long.UZERO, + groupPolicyVersion: Long.UZERO, + status: 0, + result: 0, + finalTallyResult: undefined, + votingPeriodEnd: undefined, + executorResult: 0, + messages: [] + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.id.isZero()) { + writer.uint32(8).uint64(message.id); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + if (message.metadata !== "") { + writer.uint32(26).string(message.metadata); + } + + for (const v of message.proposers) { + writer.uint32(34).string(v!); + } + + if (message.submitTime !== undefined) { + Timestamp.encode(toTimestamp(message.submitTime), writer.uint32(42).fork()).ldelim(); + } + + if (!message.groupVersion.isZero()) { + writer.uint32(48).uint64(message.groupVersion); + } + + if (!message.groupPolicyVersion.isZero()) { + writer.uint32(56).uint64(message.groupPolicyVersion); + } + + if (message.status !== 0) { + writer.uint32(64).int32(message.status); + } + + if (message.result !== 0) { + writer.uint32(72).int32(message.result); + } + + if (message.finalTallyResult !== undefined) { + TallyResult.encode(message.finalTallyResult, writer.uint32(82).fork()).ldelim(); + } + + if (message.votingPeriodEnd !== undefined) { + Timestamp.encode(toTimestamp(message.votingPeriodEnd), writer.uint32(90).fork()).ldelim(); + } + + if (message.executorResult !== 0) { + writer.uint32(96).int32(message.executorResult); + } + + for (const v of message.messages) { + Any.encode(v!, writer.uint32(106).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = (reader.uint64() as Long); + break; + + case 2: + message.address = reader.string(); + break; + + case 3: + message.metadata = reader.string(); + break; + + case 4: + message.proposers.push(reader.string()); + break; + + case 5: + message.submitTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 6: + message.groupVersion = (reader.uint64() as Long); + break; + + case 7: + message.groupPolicyVersion = (reader.uint64() as Long); + break; + + case 8: + message.status = (reader.int32() as any); + break; + + case 9: + message.result = (reader.int32() as any); + break; + + case 10: + message.finalTallyResult = TallyResult.decode(reader, reader.uint32()); + break; + + case 11: + message.votingPeriodEnd = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 12: + message.executorResult = (reader.int32() as any); + break; + + case 13: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Proposal { + const message = createBaseProposal(); + message.id = object.id !== undefined && object.id !== null ? Long.fromValue(object.id) : Long.UZERO; + message.address = object.address ?? ""; + message.metadata = object.metadata ?? 
""; + message.proposers = object.proposers?.map(e => e) || []; + message.submitTime = object.submitTime ?? undefined; + message.groupVersion = object.groupVersion !== undefined && object.groupVersion !== null ? Long.fromValue(object.groupVersion) : Long.UZERO; + message.groupPolicyVersion = object.groupPolicyVersion !== undefined && object.groupPolicyVersion !== null ? Long.fromValue(object.groupPolicyVersion) : Long.UZERO; + message.status = object.status ?? 0; + message.result = object.result ?? 0; + message.finalTallyResult = object.finalTallyResult !== undefined && object.finalTallyResult !== null ? TallyResult.fromPartial(object.finalTallyResult) : undefined; + message.votingPeriodEnd = object.votingPeriodEnd ?? undefined; + message.executorResult = object.executorResult ?? 0; + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseTallyResult(): TallyResult { + return { + yesCount: "", + abstainCount: "", + noCount: "", + noWithVetoCount: "" + }; +} + +export const TallyResult = { + encode(message: TallyResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.yesCount !== "") { + writer.uint32(10).string(message.yesCount); + } + + if (message.abstainCount !== "") { + writer.uint32(18).string(message.abstainCount); + } + + if (message.noCount !== "") { + writer.uint32(26).string(message.noCount); + } + + if (message.noWithVetoCount !== "") { + writer.uint32(34).string(message.noWithVetoCount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TallyResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTallyResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.yesCount = reader.string(); + break; + + case 2: + message.abstainCount = reader.string(); + break; + + case 3: + message.noCount = reader.string(); + break; + + case 4: + message.noWithVetoCount = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TallyResult { + const message = createBaseTallyResult(); + message.yesCount = object.yesCount ?? ""; + message.abstainCount = object.abstainCount ?? ""; + message.noCount = object.noCount ?? ""; + message.noWithVetoCount = object.noWithVetoCount ?? ""; + return message; + } + +}; + +function createBaseVote(): Vote { + return { + proposalId: Long.UZERO, + voter: "", + option: 0, + metadata: "", + submitTime: undefined + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.proposalId.isZero()) { + writer.uint32(8).uint64(message.proposalId); + } + + if (message.voter !== "") { + writer.uint32(18).string(message.voter); + } + + if (message.option !== 0) { + writer.uint32(24).int32(message.option); + } + + if (message.metadata !== "") { + writer.uint32(34).string(message.metadata); + } + + if (message.submitTime !== undefined) { + Timestamp.encode(toTimestamp(message.submitTime), writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proposalId = (reader.uint64() as Long); + break; + + case 2: + message.voter = reader.string(); + break; + + case 3: + message.option = (reader.int32() as any); + break; + + case 4: + message.metadata = reader.string(); + break; + + case 5: + message.submitTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Vote { + const message = createBaseVote(); + message.proposalId = object.proposalId !== undefined && object.proposalId !== null ? Long.fromValue(object.proposalId) : Long.UZERO; + message.voter = object.voter ?? ""; + message.option = object.option ?? 0; + message.metadata = object.metadata ?? ""; + message.submitTime = object.submitTime ?? undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/lcd.ts b/packages/codegen/src/cosmos/lcd.ts new file mode 100644 index 00000000..954fcdf8 --- /dev/null +++ b/packages/codegen/src/cosmos/lcd.ts @@ -0,0 +1,99 @@ +import { LCDClient } from "@osmonauts/lcd"; +export const createLCDClient = async ({ + restEndpoint +}: { + restEndpoint: string; +}) => { + const requestClient = new LCDClient({ + restEndpoint + }); + return { + cosmos: { + auth: { + v1beta1: new (await import("./auth/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + authz: { + v1beta1: new (await import("./authz/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + bank: { + v1beta1: new (await import("./bank/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + base: { + tendermint: { + v1beta1: new (await import("./base/tendermint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + distribution: { + v1beta1: new (await import("./distribution/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + evidence: { + v1beta1: new (await import("./evidence/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + feegrant: { + v1beta1: new (await import("./feegrant/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + gov: { + v1: new (await import("./gov/v1/query.lcd")).LCDQueryClient({ + requestClient + }), + v1beta1: new (await import("./gov/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + group: { + v1: new (await import("./group/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + mint: { + v1beta1: new (await import("./mint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + nft: { + v1beta1: new (await import("./nft/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + params: { + v1beta1: new (await import("./params/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + slashing: { + v1beta1: new (await import("./slashing/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + staking: { + v1beta1: new (await import("./staking/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + tx: { + v1beta1: new (await import("./tx/v1beta1/service.lcd")).LCDQueryClient({ + requestClient + }) + }, + upgrade: { + v1beta1: new (await import("./upgrade/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/mint/v1beta1/genesis.ts 
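For reference, a minimal usage sketch of the generated createLCDClient above, assuming a hypothetical REST endpoint and an illustrative import path (the sketch itself is not part of the generated output):

// Usage sketch – endpoint and import path are placeholders.
import { createLCDClient } from "./src/cosmos/lcd";

async function main() {
  // Each module's LCD query client is constructed up front via dynamic import.
  const client = await createLCDClient({
    restEndpoint: "https://rest.example.com" // placeholder LCD/REST endpoint
  });

  // Responses follow the *SDKType (snake_case) shapes returned by the LCD clients.
  const mintParams = await client.cosmos.mint.v1beta1.params();
  console.log(mintParams);
}

main();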
b/packages/codegen/src/cosmos/mint/v1beta1/genesis.ts new file mode 100644 index 00000000..a1419886 --- /dev/null +++ b/packages/codegen/src/cosmos/mint/v1beta1/genesis.ts @@ -0,0 +1,73 @@ +import { Minter, MinterSDKType, Params, ParamsSDKType } from "./mint"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the mint module's genesis state. */ + +export interface GenesisState { + /** minter is a space for holding current inflation information. */ + minter?: Minter; + /** params defines all the paramaters of the module. */ + + params?: Params; +} +/** GenesisState defines the mint module's genesis state. */ + +export interface GenesisStateSDKType { + minter?: MinterSDKType; + params?: ParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + minter: undefined, + params: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.minter !== undefined) { + Minter.encode(message.minter, writer.uint32(10).fork()).ldelim(); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.minter = Minter.decode(reader, reader.uint32()); + break; + + case 2: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.minter = object.minter !== undefined && object.minter !== null ? Minter.fromPartial(object.minter) : undefined; + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/mint/v1beta1/mint.ts b/packages/codegen/src/cosmos/mint/v1beta1/mint.ts new file mode 100644 index 00000000..ea0eacc6 --- /dev/null +++ b/packages/codegen/src/cosmos/mint/v1beta1/mint.ts @@ -0,0 +1,198 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Minter represents the minting state. */ + +export interface Minter { + /** current annual inflation rate */ + inflation: string; + /** current annual expected provisions */ + + annualProvisions: string; +} +/** Minter represents the minting state. */ + +export interface MinterSDKType { + inflation: string; + annual_provisions: string; +} +/** Params holds parameters for the mint module. */ + +export interface Params { + /** type of coin to mint */ + mintDenom: string; + /** maximum annual change in inflation rate */ + + inflationRateChange: string; + /** maximum inflation rate */ + + inflationMax: string; + /** minimum inflation rate */ + + inflationMin: string; + /** goal of percent bonded atoms */ + + goalBonded: string; + /** expected blocks per year */ + + blocksPerYear: Long; +} +/** Params holds parameters for the mint module. 
*/ + +export interface ParamsSDKType { + mint_denom: string; + inflation_rate_change: string; + inflation_max: string; + inflation_min: string; + goal_bonded: string; + blocks_per_year: Long; +} + +function createBaseMinter(): Minter { + return { + inflation: "", + annualProvisions: "" + }; +} + +export const Minter = { + encode(message: Minter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.inflation !== "") { + writer.uint32(10).string(message.inflation); + } + + if (message.annualProvisions !== "") { + writer.uint32(18).string(message.annualProvisions); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Minter { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMinter(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.inflation = reader.string(); + break; + + case 2: + message.annualProvisions = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Minter { + const message = createBaseMinter(); + message.inflation = object.inflation ?? ""; + message.annualProvisions = object.annualProvisions ?? ""; + return message; + } + +}; + +function createBaseParams(): Params { + return { + mintDenom: "", + inflationRateChange: "", + inflationMax: "", + inflationMin: "", + goalBonded: "", + blocksPerYear: Long.UZERO + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mintDenom !== "") { + writer.uint32(10).string(message.mintDenom); + } + + if (message.inflationRateChange !== "") { + writer.uint32(18).string(message.inflationRateChange); + } + + if (message.inflationMax !== "") { + writer.uint32(26).string(message.inflationMax); + } + + if (message.inflationMin !== "") { + writer.uint32(34).string(message.inflationMin); + } + + if (message.goalBonded !== "") { + writer.uint32(42).string(message.goalBonded); + } + + if (!message.blocksPerYear.isZero()) { + writer.uint32(48).uint64(message.blocksPerYear); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.mintDenom = reader.string(); + break; + + case 2: + message.inflationRateChange = reader.string(); + break; + + case 3: + message.inflationMax = reader.string(); + break; + + case 4: + message.inflationMin = reader.string(); + break; + + case 5: + message.goalBonded = reader.string(); + break; + + case 6: + message.blocksPerYear = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.mintDenom = object.mintDenom ?? ""; + message.inflationRateChange = object.inflationRateChange ?? ""; + message.inflationMax = object.inflationMax ?? ""; + message.inflationMin = object.inflationMin ?? ""; + message.goalBonded = object.goalBonded ?? ""; + message.blocksPerYear = object.blocksPerYear !== undefined && object.blocksPerYear !== null ? 
Long.fromValue(object.blocksPerYear) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/mint/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/mint/v1beta1/query.lcd.ts new file mode 100644 index 00000000..92086040 --- /dev/null +++ b/packages/codegen/src/cosmos/mint/v1beta1/query.lcd.ts @@ -0,0 +1,38 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QueryInflationRequest, QueryInflationResponseSDKType, QueryAnnualProvisionsRequest, QueryAnnualProvisionsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.inflation = this.inflation.bind(this); + this.annualProvisions = this.annualProvisions.bind(this); + } + /* Params returns the total set of minting parameters. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/params`; + return await this.req.get(endpoint); + } + /* Inflation returns the current minting inflation value. */ + + + async inflation(_params: QueryInflationRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/inflation`; + return await this.req.get(endpoint); + } + /* AnnualProvisions current minting annual provisions value. */ + + + async annualProvisions(_params: QueryAnnualProvisionsRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/annual_provisions`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/mint/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/mint/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..4e7ca73e --- /dev/null +++ b/packages/codegen/src/cosmos/mint/v1beta1/query.rpc.Query.ts @@ -0,0 +1,63 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QueryInflationRequest, QueryInflationResponse, QueryAnnualProvisionsRequest, QueryAnnualProvisionsResponse } from "./query"; +/** Query provides defines the gRPC querier service. */ + +export interface Query { + /** Params returns the total set of minting parameters. */ + params(request?: QueryParamsRequest): Promise; + /** Inflation returns the current minting inflation value. */ + + inflation(request?: QueryInflationRequest): Promise; + /** AnnualProvisions current minting annual provisions value. 
*/ + + annualProvisions(request?: QueryAnnualProvisionsRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.inflation = this.inflation.bind(this); + this.annualProvisions = this.annualProvisions.bind(this); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.mint.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + inflation(request: QueryInflationRequest = {}): Promise { + const data = QueryInflationRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.mint.v1beta1.Query", "Inflation", data); + return promise.then(data => QueryInflationResponse.decode(new _m0.Reader(data))); + } + + annualProvisions(request: QueryAnnualProvisionsRequest = {}): Promise { + const data = QueryAnnualProvisionsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.mint.v1beta1.Query", "AnnualProvisions", data); + return promise.then(data => QueryAnnualProvisionsResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + inflation(request?: QueryInflationRequest): Promise { + return queryService.inflation(request); + }, + + annualProvisions(request?: QueryAnnualProvisionsRequest): Promise { + return queryService.annualProvisions(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/mint/v1beta1/query.ts b/packages/codegen/src/cosmos/mint/v1beta1/query.ts new file mode 100644 index 00000000..066cd69b --- /dev/null +++ b/packages/codegen/src/cosmos/mint/v1beta1/query.ts @@ -0,0 +1,309 @@ +import { Params, ParamsSDKType } from "./mint"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** QueryInflationRequest is the request type for the Query/Inflation RPC method. */ + +export interface QueryInflationRequest {} +/** QueryInflationRequest is the request type for the Query/Inflation RPC method. */ + +export interface QueryInflationRequestSDKType {} +/** + * QueryInflationResponse is the response type for the Query/Inflation RPC + * method. + */ + +export interface QueryInflationResponse { + /** inflation is the current minting inflation value. */ + inflation: Uint8Array; +} +/** + * QueryInflationResponse is the response type for the Query/Inflation RPC + * method. 
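A minimal sketch of wiring the generated mint createRpcQueryExtension above into a @cosmjs/stargate QueryClient; the RPC endpoint is a placeholder and the Tendermint34Client setup is an assumption about the consuming application:

// Usage sketch – endpoint and import path are placeholders.
import { QueryClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { createRpcQueryExtension } from "./src/cosmos/mint/v1beta1/query.rpc.Query";

async function queryMint() {
  const tmClient = await Tendermint34Client.connect("https://rpc.example.com"); // placeholder RPC endpoint
  const queryClient = QueryClient.withExtensions(tmClient, createRpcQueryExtension);

  // Proto-decoded responses with camelCase fields, e.g. params.params?.mintDenom.
  const params = await queryClient.params();
  const inflation = await queryClient.inflation();
  return { params, inflation };
}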
+ */ + +export interface QueryInflationResponseSDKType { + inflation: Uint8Array; +} +/** + * QueryAnnualProvisionsRequest is the request type for the + * Query/AnnualProvisions RPC method. + */ + +export interface QueryAnnualProvisionsRequest {} +/** + * QueryAnnualProvisionsRequest is the request type for the + * Query/AnnualProvisions RPC method. + */ + +export interface QueryAnnualProvisionsRequestSDKType {} +/** + * QueryAnnualProvisionsResponse is the response type for the + * Query/AnnualProvisions RPC method. + */ + +export interface QueryAnnualProvisionsResponse { + /** annual_provisions is the current minting annual provisions value. */ + annualProvisions: Uint8Array; +} +/** + * QueryAnnualProvisionsResponse is the response type for the + * Query/AnnualProvisions RPC method. + */ + +export interface QueryAnnualProvisionsResponseSDKType { + annual_provisions: Uint8Array; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryInflationRequest(): QueryInflationRequest { + return {}; +} + +export const QueryInflationRequest = { + encode(_: QueryInflationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryInflationRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryInflationRequest { + const message = createBaseQueryInflationRequest(); + return message; + } + +}; + +function createBaseQueryInflationResponse(): QueryInflationResponse { + return { + inflation: new Uint8Array() + }; +} + +export const QueryInflationResponse = { + encode(message: QueryInflationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.inflation.length !== 0) { + writer.uint32(10).bytes(message.inflation); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryInflationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.inflation = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryInflationResponse { + const message = createBaseQueryInflationResponse(); + message.inflation = object.inflation ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryAnnualProvisionsRequest(): QueryAnnualProvisionsRequest { + return {}; +} + +export const QueryAnnualProvisionsRequest = { + encode(_: QueryAnnualProvisionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAnnualProvisionsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryAnnualProvisionsRequest { + const message = createBaseQueryAnnualProvisionsRequest(); + return message; + } + +}; + +function createBaseQueryAnnualProvisionsResponse(): QueryAnnualProvisionsResponse { + return { + annualProvisions: new Uint8Array() + }; +} + +export const QueryAnnualProvisionsResponse = { + encode(message: QueryAnnualProvisionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.annualProvisions.length !== 0) { + writer.uint32(10).bytes(message.annualProvisions); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAnnualProvisionsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.annualProvisions = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAnnualProvisionsResponse { + const message = createBaseQueryAnnualProvisionsResponse(); + message.annualProvisions = object.annualProvisions ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/msg/v1/msg.ts b/packages/codegen/src/cosmos/msg/v1/msg.ts new file mode 100644 index 00000000..693da49f --- /dev/null +++ b/packages/codegen/src/cosmos/msg/v1/msg.ts @@ -0,0 +1 @@ +export {} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/event.ts b/packages/codegen/src/cosmos/nft/v1beta1/event.ts new file mode 100644 index 00000000..88a73f56 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/event.ts @@ -0,0 +1,251 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** EventSend is emitted on Msg/Send */ + +export interface EventSend { + classId: string; + id: string; + sender: string; + receiver: string; +} +/** EventSend is emitted on Msg/Send */ + +export interface EventSendSDKType { + class_id: string; + id: string; + sender: string; + receiver: string; +} +/** EventMint is emitted on Mint */ + +export interface EventMint { + classId: string; + id: string; + owner: string; +} +/** EventMint is emitted on Mint */ + +export interface EventMintSDKType { + class_id: string; + id: string; + owner: string; +} +/** EventBurn is emitted on Burn */ + +export interface EventBurn { + classId: string; + id: string; + owner: string; +} +/** EventBurn is emitted on Burn */ + +export interface EventBurnSDKType { + class_id: string; + id: string; + owner: string; +} + +function createBaseEventSend(): EventSend { + return { + classId: "", + id: "", + sender: "", + receiver: "" + }; +} + +export const EventSend = { + encode(message: EventSend, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + if (message.sender !== "") { + writer.uint32(26).string(message.sender); + } + + if (message.receiver !== "") { + writer.uint32(34).string(message.receiver); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventSend { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventSend(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + case 3: + message.sender = reader.string(); + break; + + case 4: + message.receiver = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventSend { + const message = createBaseEventSend(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + message.sender = object.sender ?? ""; + message.receiver = object.receiver ?? 
""; + return message; + } + +}; + +function createBaseEventMint(): EventMint { + return { + classId: "", + id: "", + owner: "" + }; +} + +export const EventMint = { + encode(message: EventMint, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + if (message.owner !== "") { + writer.uint32(26).string(message.owner); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventMint { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventMint(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + case 3: + message.owner = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventMint { + const message = createBaseEventMint(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + message.owner = object.owner ?? ""; + return message; + } + +}; + +function createBaseEventBurn(): EventBurn { + return { + classId: "", + id: "", + owner: "" + }; +} + +export const EventBurn = { + encode(message: EventBurn, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + if (message.owner !== "") { + writer.uint32(26).string(message.owner); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventBurn { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventBurn(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + case 3: + message.owner = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventBurn { + const message = createBaseEventBurn(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + message.owner = object.owner ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/genesis.ts b/packages/codegen/src/cosmos/nft/v1beta1/genesis.ts new file mode 100644 index 00000000..43a5a92e --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/genesis.ts @@ -0,0 +1,141 @@ +import { Class, ClassSDKType, NFT, NFTSDKType } from "./nft"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** GenesisState defines the nft module's genesis state. */ + +export interface GenesisState { + /** class defines the class of the nft type. */ + classes: Class[]; + entries: Entry[]; +} +/** GenesisState defines the nft module's genesis state. 
*/ + +export interface GenesisStateSDKType { + classes: ClassSDKType[]; + entries: EntrySDKType[]; +} +/** Entry Defines all nft owned by a person */ + +export interface Entry { + /** owner is the owner address of the following nft */ + owner: string; + /** nfts is a group of nfts of the same owner */ + + nfts: NFT[]; +} +/** Entry Defines all nft owned by a person */ + +export interface EntrySDKType { + owner: string; + nfts: NFTSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + classes: [], + entries: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.classes) { + Class.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.entries) { + Entry.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classes.push(Class.decode(reader, reader.uint32())); + break; + + case 2: + message.entries.push(Entry.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.classes = object.classes?.map(e => Class.fromPartial(e)) || []; + message.entries = object.entries?.map(e => Entry.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseEntry(): Entry { + return { + owner: "", + nfts: [] + }; +} + +export const Entry = { + encode(message: Entry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + + for (const v of message.nfts) { + NFT.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Entry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + + case 2: + message.nfts.push(NFT.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Entry { + const message = createBaseEntry(); + message.owner = object.owner ?? ""; + message.nfts = object.nfts?.map(e => NFT.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/nft.ts b/packages/codegen/src/cosmos/nft/v1beta1/nft.ts new file mode 100644 index 00000000..77f4cf83 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/nft.ts @@ -0,0 +1,255 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** Class defines the class of the nft type. 
*/ + +export interface Class { + /** id defines the unique identifier of the NFT classification, similar to the contract address of ERC721 */ + id: string; + /** name defines the human-readable name of the NFT classification. Optional */ + + name: string; + /** symbol is an abbreviated name for nft classification. Optional */ + + symbol: string; + /** description is a brief description of nft classification. Optional */ + + description: string; + /** uri for the class metadata stored off chain. It can define schema for Class and NFT `Data` attributes. Optional */ + + uri: string; + /** uri_hash is a hash of the document pointed by uri. Optional */ + + uriHash: string; + /** data is the app specific metadata of the NFT class. Optional */ + + data?: Any; +} +/** Class defines the class of the nft type. */ + +export interface ClassSDKType { + id: string; + name: string; + symbol: string; + description: string; + uri: string; + uri_hash: string; + data?: AnySDKType; +} +/** NFT defines the NFT. */ + +export interface NFT { + /** class_id associated with the NFT, similar to the contract address of ERC721 */ + classId: string; + /** id is a unique identifier of the NFT */ + + id: string; + /** uri for the NFT metadata stored off chain */ + + uri: string; + /** uri_hash is a hash of the document pointed by uri */ + + uriHash: string; + /** data is an app specific data of the NFT. Optional */ + + data?: Any; +} +/** NFT defines the NFT. */ + +export interface NFTSDKType { + class_id: string; + id: string; + uri: string; + uri_hash: string; + data?: AnySDKType; +} + +function createBaseClass(): Class { + return { + id: "", + name: "", + symbol: "", + description: "", + uri: "", + uriHash: "", + data: undefined + }; +} + +export const Class = { + encode(message: Class, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + + if (message.symbol !== "") { + writer.uint32(26).string(message.symbol); + } + + if (message.description !== "") { + writer.uint32(34).string(message.description); + } + + if (message.uri !== "") { + writer.uint32(42).string(message.uri); + } + + if (message.uriHash !== "") { + writer.uint32(50).string(message.uriHash); + } + + if (message.data !== undefined) { + Any.encode(message.data, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Class { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClass(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.string(); + break; + + case 2: + message.name = reader.string(); + break; + + case 3: + message.symbol = reader.string(); + break; + + case 4: + message.description = reader.string(); + break; + + case 5: + message.uri = reader.string(); + break; + + case 6: + message.uriHash = reader.string(); + break; + + case 7: + message.data = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Class { + const message = createBaseClass(); + message.id = object.id ?? ""; + message.name = object.name ?? ""; + message.symbol = object.symbol ?? ""; + message.description = object.description ?? 
""; + message.uri = object.uri ?? ""; + message.uriHash = object.uriHash ?? ""; + message.data = object.data !== undefined && object.data !== null ? Any.fromPartial(object.data) : undefined; + return message; + } + +}; + +function createBaseNFT(): NFT { + return { + classId: "", + id: "", + uri: "", + uriHash: "", + data: undefined + }; +} + +export const NFT = { + encode(message: NFT, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + if (message.uri !== "") { + writer.uint32(26).string(message.uri); + } + + if (message.uriHash !== "") { + writer.uint32(34).string(message.uriHash); + } + + if (message.data !== undefined) { + Any.encode(message.data, writer.uint32(82).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NFT { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNFT(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + case 3: + message.uri = reader.string(); + break; + + case 4: + message.uriHash = reader.string(); + break; + + case 10: + message.data = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NFT { + const message = createBaseNFT(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + message.uri = object.uri ?? ""; + message.uriHash = object.uriHash ?? ""; + message.data = object.data !== undefined && object.data !== null ? 
Any.fromPartial(object.data) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/nft/v1beta1/query.lcd.ts new file mode 100644 index 00000000..ca3a1dc1 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/query.lcd.ts @@ -0,0 +1,98 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryBalanceRequest, QueryBalanceResponseSDKType, QueryOwnerRequest, QueryOwnerResponseSDKType, QuerySupplyRequest, QuerySupplyResponseSDKType, QueryNFTsRequest, QueryNFTsResponseSDKType, QueryNFTRequest, QueryNFTResponseSDKType, QueryClassRequest, QueryClassResponseSDKType, QueryClassesRequest, QueryClassesResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.balance = this.balance.bind(this); + this.owner = this.owner.bind(this); + this.supply = this.supply.bind(this); + this.nFTs = this.nFTs.bind(this); + this.nFT = this.nFT.bind(this); + this.class = this.class.bind(this); + this.classes = this.classes.bind(this); + } + /* Balance queries the number of NFTs of a given class owned by the owner, same as balanceOf in ERC721 */ + + + async balance(params: QueryBalanceRequest): Promise { + const endpoint = `cosmos/nft/v1beta1/balance/${params.owner}/${params.classId}`; + return await this.req.get(endpoint); + } + /* Owner queries the owner of the NFT based on its class and id, same as ownerOf in ERC721 */ + + + async owner(params: QueryOwnerRequest): Promise { + const endpoint = `cosmos/nft/v1beta1/owner/${params.classId}/${params.id}`; + return await this.req.get(endpoint); + } + /* Supply queries the number of NFTs from the given class, same as totalSupply of ERC721. */ + + + async supply(params: QuerySupplyRequest): Promise { + const endpoint = `cosmos/nft/v1beta1/supply/${params.classId}`; + return await this.req.get(endpoint); + } + /* NFTs queries all NFTs of a given class or owner,choose at least one of the two, similar to tokenByIndex in + ERC721Enumerable */ + + + async nFTs(params: QueryNFTsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.classId !== "undefined") { + options.params.class_id = params.classId; + } + + if (typeof params?.owner !== "undefined") { + options.params.owner = params.owner; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/nft/v1beta1/nfts`; + return await this.req.get(endpoint, options); + } + /* NFT queries an NFT based on its class and id. 
*/ + + + async nFT(params: QueryNFTRequest): Promise { + const endpoint = `cosmos/nft/v1beta1/nfts/${params.classId}/${params.id}`; + return await this.req.get(endpoint); + } + /* Class queries an NFT class based on its id */ + + + async class(params: QueryClassRequest): Promise { + const endpoint = `cosmos/nft/v1beta1/classes/${params.classId}`; + return await this.req.get(endpoint); + } + /* Classes queries all NFT classes */ + + + async classes(params: QueryClassesRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/nft/v1beta1/classes`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/nft/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..72e63815 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/query.rpc.Query.ts @@ -0,0 +1,124 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryBalanceRequest, QueryBalanceResponse, QueryOwnerRequest, QueryOwnerResponse, QuerySupplyRequest, QuerySupplyResponse, QueryNFTsRequest, QueryNFTsResponse, QueryNFTRequest, QueryNFTResponse, QueryClassRequest, QueryClassResponse, QueryClassesRequest, QueryClassesResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** Balance queries the number of NFTs of a given class owned by the owner, same as balanceOf in ERC721 */ + balance(request: QueryBalanceRequest): Promise; + /** Owner queries the owner of the NFT based on its class and id, same as ownerOf in ERC721 */ + + owner(request: QueryOwnerRequest): Promise; + /** Supply queries the number of NFTs from the given class, same as totalSupply of ERC721. */ + + supply(request: QuerySupplyRequest): Promise; + /** + * NFTs queries all NFTs of a given class or owner,choose at least one of the two, similar to tokenByIndex in + * ERC721Enumerable + */ + + nFTs(request: QueryNFTsRequest): Promise; + /** NFT queries an NFT based on its class and id. 
*/ + + nFT(request: QueryNFTRequest): Promise; + /** Class queries an NFT class based on its id */ + + class(request: QueryClassRequest): Promise; + /** Classes queries all NFT classes */ + + classes(request?: QueryClassesRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.balance = this.balance.bind(this); + this.owner = this.owner.bind(this); + this.supply = this.supply.bind(this); + this.nFTs = this.nFTs.bind(this); + this.nFT = this.nFT.bind(this); + this.class = this.class.bind(this); + this.classes = this.classes.bind(this); + } + + balance(request: QueryBalanceRequest): Promise { + const data = QueryBalanceRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "Balance", data); + return promise.then(data => QueryBalanceResponse.decode(new _m0.Reader(data))); + } + + owner(request: QueryOwnerRequest): Promise { + const data = QueryOwnerRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "Owner", data); + return promise.then(data => QueryOwnerResponse.decode(new _m0.Reader(data))); + } + + supply(request: QuerySupplyRequest): Promise { + const data = QuerySupplyRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "Supply", data); + return promise.then(data => QuerySupplyResponse.decode(new _m0.Reader(data))); + } + + nFTs(request: QueryNFTsRequest): Promise { + const data = QueryNFTsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "NFTs", data); + return promise.then(data => QueryNFTsResponse.decode(new _m0.Reader(data))); + } + + nFT(request: QueryNFTRequest): Promise { + const data = QueryNFTRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "NFT", data); + return promise.then(data => QueryNFTResponse.decode(new _m0.Reader(data))); + } + + class(request: QueryClassRequest): Promise { + const data = QueryClassRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "Class", data); + return promise.then(data => QueryClassResponse.decode(new _m0.Reader(data))); + } + + classes(request: QueryClassesRequest = { + pagination: undefined + }): Promise { + const data = QueryClassesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Query", "Classes", data); + return promise.then(data => QueryClassesResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + balance(request: QueryBalanceRequest): Promise { + return queryService.balance(request); + }, + + owner(request: QueryOwnerRequest): Promise { + return queryService.owner(request); + }, + + supply(request: QuerySupplyRequest): Promise { + return queryService.supply(request); + }, + + nFTs(request: QueryNFTsRequest): Promise { + return queryService.nFTs(request); + }, + + nFT(request: QueryNFTRequest): Promise { + return queryService.nFT(request); + }, + + class(request: QueryClassRequest): Promise { + return queryService.class(request); + }, + + classes(request?: QueryClassesRequest): Promise { + return queryService.classes(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/query.ts 
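A brief usage sketch for the generated nft query.lcd.ts above, assuming the same @osmonauts/lcd request client that the generated code uses; endpoint, class id and owner address are placeholders:

// Usage sketch – endpoint, class id and owner address are placeholders.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./src/cosmos/nft/v1beta1/query.lcd";

async function listNfts() {
  const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
  const nftQuery = new LCDQueryClient({ requestClient });

  // classId/owner are sent as class_id/owner query params; pagination is optional.
  const res = await nftQuery.nFTs({
    classId: "example.class",
    owner: "cosmos1owner...", // placeholder address
    pagination: undefined
  });
  return res.nfts;
}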
b/packages/codegen/src/cosmos/nft/v1beta1/query.ts new file mode 100644 index 00000000..2d72be79 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/query.ts @@ -0,0 +1,859 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { NFT, NFTSDKType, Class, ClassSDKType } from "./nft"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryBalanceRequest is the request type for the Query/Balance RPC method */ + +export interface QueryBalanceRequest { + classId: string; + owner: string; +} +/** QueryBalanceRequest is the request type for the Query/Balance RPC method */ + +export interface QueryBalanceRequestSDKType { + class_id: string; + owner: string; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method */ + +export interface QueryBalanceResponse { + amount: Long; +} +/** QueryBalanceResponse is the response type for the Query/Balance RPC method */ + +export interface QueryBalanceResponseSDKType { + amount: Long; +} +/** QueryOwnerRequest is the request type for the Query/Owner RPC method */ + +export interface QueryOwnerRequest { + classId: string; + id: string; +} +/** QueryOwnerRequest is the request type for the Query/Owner RPC method */ + +export interface QueryOwnerRequestSDKType { + class_id: string; + id: string; +} +/** QueryOwnerResponse is the response type for the Query/Owner RPC method */ + +export interface QueryOwnerResponse { + owner: string; +} +/** QueryOwnerResponse is the response type for the Query/Owner RPC method */ + +export interface QueryOwnerResponseSDKType { + owner: string; +} +/** QuerySupplyRequest is the request type for the Query/Supply RPC method */ + +export interface QuerySupplyRequest { + classId: string; +} +/** QuerySupplyRequest is the request type for the Query/Supply RPC method */ + +export interface QuerySupplyRequestSDKType { + class_id: string; +} +/** QuerySupplyResponse is the response type for the Query/Supply RPC method */ + +export interface QuerySupplyResponse { + amount: Long; +} +/** QuerySupplyResponse is the response type for the Query/Supply RPC method */ + +export interface QuerySupplyResponseSDKType { + amount: Long; +} +/** QueryNFTstRequest is the request type for the Query/NFTs RPC method */ + +export interface QueryNFTsRequest { + classId: string; + owner: string; + pagination?: PageRequest; +} +/** QueryNFTstRequest is the request type for the Query/NFTs RPC method */ + +export interface QueryNFTsRequestSDKType { + class_id: string; + owner: string; + pagination?: PageRequestSDKType; +} +/** QueryNFTsResponse is the response type for the Query/NFTs RPC methods */ + +export interface QueryNFTsResponse { + nfts: NFT[]; + pagination?: PageResponse; +} +/** QueryNFTsResponse is the response type for the Query/NFTs RPC methods */ + +export interface QueryNFTsResponseSDKType { + nfts: NFTSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryNFTRequest is the request type for the Query/NFT RPC method */ + +export interface QueryNFTRequest { + classId: string; + id: string; +} +/** QueryNFTRequest is the request type for the Query/NFT RPC method */ + +export interface QueryNFTRequestSDKType { + class_id: string; + id: string; +} +/** QueryNFTResponse is the response type for the Query/NFT RPC method */ + +export interface QueryNFTResponse { + nft?: NFT; +} +/** QueryNFTResponse is the response type for the Query/NFT RPC method */ + +export interface 
QueryNFTResponseSDKType { + nft?: NFTSDKType; +} +/** QueryClassRequest is the request type for the Query/Class RPC method */ + +export interface QueryClassRequest { + classId: string; +} +/** QueryClassRequest is the request type for the Query/Class RPC method */ + +export interface QueryClassRequestSDKType { + class_id: string; +} +/** QueryClassResponse is the response type for the Query/Class RPC method */ + +export interface QueryClassResponse { + class?: Class; +} +/** QueryClassResponse is the response type for the Query/Class RPC method */ + +export interface QueryClassResponseSDKType { + class?: ClassSDKType; +} +/** QueryClassesRequest is the request type for the Query/Classes RPC method */ + +export interface QueryClassesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryClassesRequest is the request type for the Query/Classes RPC method */ + +export interface QueryClassesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryClassesResponse is the response type for the Query/Classes RPC method */ + +export interface QueryClassesResponse { + classes: Class[]; + pagination?: PageResponse; +} +/** QueryClassesResponse is the response type for the Query/Classes RPC method */ + +export interface QueryClassesResponseSDKType { + classes: ClassSDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryBalanceRequest(): QueryBalanceRequest { + return { + classId: "", + owner: "" + }; +} + +export const QueryBalanceRequest = { + encode(message: QueryBalanceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.owner !== "") { + writer.uint32(18).string(message.owner); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryBalanceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.owner = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryBalanceRequest { + const message = createBaseQueryBalanceRequest(); + message.classId = object.classId ?? ""; + message.owner = object.owner ?? ""; + return message; + } + +}; + +function createBaseQueryBalanceResponse(): QueryBalanceResponse { + return { + amount: Long.UZERO + }; +} + +export const QueryBalanceResponse = { + encode(message: QueryBalanceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.amount.isZero()) { + writer.uint32(8).uint64(message.amount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryBalanceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryBalanceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryBalanceResponse { + const message = createBaseQueryBalanceResponse(); + message.amount = object.amount !== undefined && object.amount !== null ? Long.fromValue(object.amount) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryOwnerRequest(): QueryOwnerRequest { + return { + classId: "", + id: "" + }; +} + +export const QueryOwnerRequest = { + encode(message: QueryOwnerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryOwnerRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryOwnerRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryOwnerRequest { + const message = createBaseQueryOwnerRequest(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + return message; + } + +}; + +function createBaseQueryOwnerResponse(): QueryOwnerResponse { + return { + owner: "" + }; +} + +export const QueryOwnerResponse = { + encode(message: QueryOwnerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryOwnerResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryOwnerResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryOwnerResponse { + const message = createBaseQueryOwnerResponse(); + message.owner = object.owner ?? ""; + return message; + } + +}; + +function createBaseQuerySupplyRequest(): QuerySupplyRequest { + return { + classId: "" + }; +} + +export const QuerySupplyRequest = { + encode(message: QuerySupplyRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySupplyRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySupplyRequest { + const message = createBaseQuerySupplyRequest(); + message.classId = object.classId ?? ""; + return message; + } + +}; + +function createBaseQuerySupplyResponse(): QuerySupplyResponse { + return { + amount: Long.UZERO + }; +} + +export const QuerySupplyResponse = { + encode(message: QuerySupplyResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.amount.isZero()) { + writer.uint32(8).uint64(message.amount); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySupplyResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySupplyResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySupplyResponse { + const message = createBaseQuerySupplyResponse(); + message.amount = object.amount !== undefined && object.amount !== null ? Long.fromValue(object.amount) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryNFTsRequest(): QueryNFTsRequest { + return { + classId: "", + owner: "", + pagination: undefined + }; +} + +export const QueryNFTsRequest = { + encode(message: QueryNFTsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.owner !== "") { + writer.uint32(18).string(message.owner); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNFTsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.owner = reader.string(); + break; + + case 3: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNFTsRequest { + const message = createBaseQueryNFTsRequest(); + message.classId = object.classId ?? ""; + message.owner = object.owner ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryNFTsResponse(): QueryNFTsResponse { + return { + nfts: [], + pagination: undefined + }; +} + +export const QueryNFTsResponse = { + encode(message: QueryNFTsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.nfts) { + NFT.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNFTsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nfts.push(NFT.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNFTsResponse { + const message = createBaseQueryNFTsResponse(); + message.nfts = object.nfts?.map(e => NFT.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryNFTRequest(): QueryNFTRequest { + return { + classId: "", + id: "" + }; +} + +export const QueryNFTRequest = { + encode(message: QueryNFTRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNFTRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNFTRequest { + const message = createBaseQueryNFTRequest(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + return message; + } + +}; + +function createBaseQueryNFTResponse(): QueryNFTResponse { + return { + nft: undefined + }; +} + +export const QueryNFTResponse = { + encode(message: QueryNFTResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nft !== undefined) { + NFT.encode(message.nft, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNFTResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryNFTResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nft = NFT.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNFTResponse { + const message = createBaseQueryNFTResponse(); + message.nft = object.nft !== undefined && object.nft !== null ? NFT.fromPartial(object.nft) : undefined; + return message; + } + +}; + +function createBaseQueryClassRequest(): QueryClassRequest { + return { + classId: "" + }; +} + +export const QueryClassRequest = { + encode(message: QueryClassRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClassRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClassRequest { + const message = createBaseQueryClassRequest(); + message.classId = object.classId ?? ""; + return message; + } + +}; + +function createBaseQueryClassResponse(): QueryClassResponse { + return { + class: undefined + }; +} + +export const QueryClassResponse = { + encode(message: QueryClassResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.class !== undefined) { + Class.encode(message.class, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClassResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.class = Class.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClassResponse { + const message = createBaseQueryClassResponse(); + message.class = object.class !== undefined && object.class !== null ? Class.fromPartial(object.class) : undefined; + return message; + } + +}; + +function createBaseQueryClassesRequest(): QueryClassesRequest { + return { + pagination: undefined + }; +} + +export const QueryClassesRequest = { + encode(message: QueryClassesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryClassesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClassesRequest { + const message = createBaseQueryClassesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryClassesResponse(): QueryClassesResponse { + return { + classes: [], + pagination: undefined + }; +} + +export const QueryClassesResponse = { + encode(message: QueryClassesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.classes) { + Class.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClassesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClassesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classes.push(Class.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClassesResponse { + const message = createBaseQueryClassesResponse(); + message.classes = object.classes?.map(e => Class.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/nft/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..b6ad14c7 --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSend, MsgSendResponse } from "./tx"; +/** Msg defines the nft Msg service. */ + +export interface Msg { + /** Send defines a method to send a nft from one account to another account. 
*/ + send(request: MsgSend): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.send = this.send.bind(this); + } + + send(request: MsgSend): Promise { + const data = MsgSend.encode(request).finish(); + const promise = this.rpc.request("cosmos.nft.v1beta1.Msg", "Send", data); + return promise.then(data => MsgSendResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/nft/v1beta1/tx.ts b/packages/codegen/src/cosmos/nft/v1beta1/tx.ts new file mode 100644 index 00000000..9bacc67a --- /dev/null +++ b/packages/codegen/src/cosmos/nft/v1beta1/tx.ts @@ -0,0 +1,140 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgSend represents a message to send a nft from one account to another account. */ + +export interface MsgSend { + /** class_id defines the unique identifier of the nft classification, similar to the contract address of ERC721 */ + classId: string; + /** id defines the unique identification of nft */ + + id: string; + /** sender is the address of the owner of nft */ + + sender: string; + /** receiver is the receiver address of nft */ + + receiver: string; +} +/** MsgSend represents a message to send a nft from one account to another account. */ + +export interface MsgSendSDKType { + class_id: string; + id: string; + sender: string; + receiver: string; +} +/** MsgSendResponse defines the Msg/Send response type. */ + +export interface MsgSendResponse {} +/** MsgSendResponse defines the Msg/Send response type. */ + +export interface MsgSendResponseSDKType {} + +function createBaseMsgSend(): MsgSend { + return { + classId: "", + id: "", + sender: "", + receiver: "" + }; +} + +export const MsgSend = { + encode(message: MsgSend, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.classId !== "") { + writer.uint32(10).string(message.classId); + } + + if (message.id !== "") { + writer.uint32(18).string(message.id); + } + + if (message.sender !== "") { + writer.uint32(26).string(message.sender); + } + + if (message.receiver !== "") { + writer.uint32(34).string(message.receiver); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSend { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSend(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.classId = reader.string(); + break; + + case 2: + message.id = reader.string(); + break; + + case 3: + message.sender = reader.string(); + break; + + case 4: + message.receiver = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSend { + const message = createBaseMsgSend(); + message.classId = object.classId ?? ""; + message.id = object.id ?? ""; + message.sender = object.sender ?? ""; + message.receiver = object.receiver ?? ""; + return message; + } + +}; + +function createBaseMsgSendResponse(): MsgSendResponse { + return {}; +} + +export const MsgSendResponse = { + encode(_: MsgSendResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSendResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSendResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSendResponse { + const message = createBaseMsgSendResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/orm/v1/orm.ts b/packages/codegen/src/cosmos/orm/v1/orm.ts new file mode 100644 index 00000000..c12f6bd6 --- /dev/null +++ b/packages/codegen/src/cosmos/orm/v1/orm.ts @@ -0,0 +1,353 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** TableDescriptor describes an ORM table. */ + +export interface TableDescriptor { + /** primary_key defines the primary key for the table. */ + primaryKey?: PrimaryKeyDescriptor; + /** index defines one or more secondary indexes. */ + + index: SecondaryIndexDescriptor[]; + /** + * id is a non-zero integer ID that must be unique within the + * tables and singletons in this file. It may be deprecated in the future when this + * can be auto-generated. + */ + + id: number; +} +/** TableDescriptor describes an ORM table. */ + +export interface TableDescriptorSDKType { + primary_key?: PrimaryKeyDescriptorSDKType; + index: SecondaryIndexDescriptorSDKType[]; + id: number; +} +/** PrimaryKeyDescriptor describes a table primary key. */ + +export interface PrimaryKeyDescriptor { + /** + * fields is a comma-separated list of fields in the primary key. Spaces are + * not allowed. Supported field types, their encodings, and any applicable constraints + * are described below. + * - uint32 are encoded as 2,3,4 or 5 bytes using a compact encoding that + * is suitable for sorted iteration (not varint encoding). This type is + * well-suited for small integers. + * - uint64 are encoded as 2,4,6 or 9 bytes using a compact encoding that + * is suitable for sorted iteration (not varint encoding). This type is + * well-suited for small integers such as auto-incrementing sequences. + * - fixed32, fixed64 are encoded as big-endian fixed width bytes and support + * sorted iteration. These types are well-suited for encoding fixed with + * decimals as integers. + * - string's are encoded as raw bytes in terminal key segments and null-terminated + * in non-terminal segments. Null characters are thus forbidden in strings. + * string fields support sorted iteration. + * - bytes are encoded as raw bytes in terminal segments and length-prefixed + * with a 32-bit unsigned varint in non-terminal segments. + * - int32, sint32, int64, sint64, sfixed32, sfixed64 are encoded as fixed width bytes with + * an encoding that enables sorted iteration. + * - google.protobuf.Timestamp and google.protobuf.Duration are encoded + * as 12 bytes using an encoding that enables sorted iteration. + * - enum fields are encoded using varint encoding and do not support sorted + * iteration. + * - bool fields are encoded as a single byte 0 or 1. + * + * All other fields types are unsupported in keys including repeated and + * oneof fields. + * + * Primary keys are prefixed by the varint encoded table id and the byte 0x0 + * plus any additional prefix specified by the schema. + */ + fields: string; + /** + * auto_increment specifies that the primary key is generated by an + * auto-incrementing integer. 
If this is set to true fields must only + * contain one field of that is of type uint64. + */ + + autoIncrement: boolean; +} +/** PrimaryKeyDescriptor describes a table primary key. */ + +export interface PrimaryKeyDescriptorSDKType { + fields: string; + auto_increment: boolean; +} +/** PrimaryKeyDescriptor describes a table secondary index. */ + +export interface SecondaryIndexDescriptor { + /** + * fields is a comma-separated list of fields in the index. The supported + * field types are the same as those for PrimaryKeyDescriptor.fields. + * Index keys are prefixed by the varint encoded table id and the varint + * encoded index id plus any additional prefix specified by the schema. + * + * In addition the the field segments, non-unique index keys are suffixed with + * any additional primary key fields not present in the index fields so that the + * primary key can be reconstructed. Unique indexes instead of being suffixed + * store the remaining primary key fields in the value.. + */ + fields: string; + /** + * id is a non-zero integer ID that must be unique within the indexes for this + * table and less than 32768. It may be deprecated in the future when this can + * be auto-generated. + */ + + id: number; + /** unique specifies that this an unique index. */ + + unique: boolean; +} +/** PrimaryKeyDescriptor describes a table secondary index. */ + +export interface SecondaryIndexDescriptorSDKType { + fields: string; + id: number; + unique: boolean; +} +/** TableDescriptor describes an ORM singleton table which has at most one instance. */ + +export interface SingletonDescriptor { + /** + * id is a non-zero integer ID that must be unique within the + * tables and singletons in this file. It may be deprecated in the future when this + * can be auto-generated. + */ + id: number; +} +/** TableDescriptor describes an ORM singleton table which has at most one instance. */ + +export interface SingletonDescriptorSDKType { + id: number; +} + +function createBaseTableDescriptor(): TableDescriptor { + return { + primaryKey: undefined, + index: [], + id: 0 + }; +} + +export const TableDescriptor = { + encode(message: TableDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.primaryKey !== undefined) { + PrimaryKeyDescriptor.encode(message.primaryKey, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.index) { + SecondaryIndexDescriptor.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.id !== 0) { + writer.uint32(24).uint32(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TableDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTableDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.primaryKey = PrimaryKeyDescriptor.decode(reader, reader.uint32()); + break; + + case 2: + message.index.push(SecondaryIndexDescriptor.decode(reader, reader.uint32())); + break; + + case 3: + message.id = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TableDescriptor { + const message = createBaseTableDescriptor(); + message.primaryKey = object.primaryKey !== undefined && object.primaryKey !== null ? 
PrimaryKeyDescriptor.fromPartial(object.primaryKey) : undefined; + message.index = object.index?.map(e => SecondaryIndexDescriptor.fromPartial(e)) || []; + message.id = object.id ?? 0; + return message; + } + +}; + +function createBasePrimaryKeyDescriptor(): PrimaryKeyDescriptor { + return { + fields: "", + autoIncrement: false + }; +} + +export const PrimaryKeyDescriptor = { + encode(message: PrimaryKeyDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fields !== "") { + writer.uint32(10).string(message.fields); + } + + if (message.autoIncrement === true) { + writer.uint32(16).bool(message.autoIncrement); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PrimaryKeyDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePrimaryKeyDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fields = reader.string(); + break; + + case 2: + message.autoIncrement = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PrimaryKeyDescriptor { + const message = createBasePrimaryKeyDescriptor(); + message.fields = object.fields ?? ""; + message.autoIncrement = object.autoIncrement ?? false; + return message; + } + +}; + +function createBaseSecondaryIndexDescriptor(): SecondaryIndexDescriptor { + return { + fields: "", + id: 0, + unique: false + }; +} + +export const SecondaryIndexDescriptor = { + encode(message: SecondaryIndexDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fields !== "") { + writer.uint32(10).string(message.fields); + } + + if (message.id !== 0) { + writer.uint32(16).uint32(message.id); + } + + if (message.unique === true) { + writer.uint32(24).bool(message.unique); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SecondaryIndexDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSecondaryIndexDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fields = reader.string(); + break; + + case 2: + message.id = reader.uint32(); + break; + + case 3: + message.unique = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SecondaryIndexDescriptor { + const message = createBaseSecondaryIndexDescriptor(); + message.fields = object.fields ?? ""; + message.id = object.id ?? 0; + message.unique = object.unique ?? false; + return message; + } + +}; + +function createBaseSingletonDescriptor(): SingletonDescriptor { + return { + id: 0 + }; +} + +export const SingletonDescriptor = { + encode(message: SingletonDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SingletonDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSingletonDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SingletonDescriptor { + const message = createBaseSingletonDescriptor(); + message.id = object.id ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/orm/v1alpha1/schema.ts b/packages/codegen/src/cosmos/orm/v1alpha1/schema.ts new file mode 100644 index 00000000..51df7212 --- /dev/null +++ b/packages/codegen/src/cosmos/orm/v1alpha1/schema.ts @@ -0,0 +1,268 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** StorageType */ + +export enum StorageType { + /** + * STORAGE_TYPE_DEFAULT_UNSPECIFIED - STORAGE_TYPE_DEFAULT_UNSPECIFIED indicates the persistent + * KV-storage where primary key entries are stored in merkle-tree + * backed commitment storage and indexes and seqs are stored in + * fast index storage. Note that the Cosmos SDK before store/v2alpha1 + * does not support this. + */ + STORAGE_TYPE_DEFAULT_UNSPECIFIED = 0, + + /** + * STORAGE_TYPE_MEMORY - STORAGE_TYPE_MEMORY indicates in-memory storage that will be + * reloaded every time an app restarts. Tables with this type of storage + * will by default be ignored when importing and exporting a module's + * state from JSON. + */ + STORAGE_TYPE_MEMORY = 1, + + /** + * STORAGE_TYPE_TRANSIENT - STORAGE_TYPE_TRANSIENT indicates transient storage that is reset + * at the end of every block. Tables with this type of storage + * will by default be ignored when importing and exporting a module's + * state from JSON. + */ + STORAGE_TYPE_TRANSIENT = 2, + + /** + * STORAGE_TYPE_INDEX - STORAGE_TYPE_INDEX indicates persistent storage which is not backed + * by a merkle-tree and won't affect the app hash. Note that the Cosmos SDK + * before store/v2alpha1 does not support this. + */ + STORAGE_TYPE_INDEX = 3, + + /** + * STORAGE_TYPE_COMMITMENT - STORAGE_TYPE_INDEX indicates persistent storage which is backed by + * a merkle-tree. With this type of storage, both primary and index keys + * will affect the app hash and this is generally less efficient + * than using STORAGE_TYPE_DEFAULT_UNSPECIFIED which separates index + * keys into index storage. Note that modules built with the + * Cosmos SDK before store/v2alpha1 must specify STORAGE_TYPE_COMMITMENT + * instead of STORAGE_TYPE_DEFAULT_UNSPECIFIED or STORAGE_TYPE_INDEX + * because this is the only type of persistent storage available. 
+ */ + STORAGE_TYPE_COMMITMENT = 4, + UNRECOGNIZED = -1, +} +export const StorageTypeSDKType = StorageType; +export function storageTypeFromJSON(object: any): StorageType { + switch (object) { + case 0: + case "STORAGE_TYPE_DEFAULT_UNSPECIFIED": + return StorageType.STORAGE_TYPE_DEFAULT_UNSPECIFIED; + + case 1: + case "STORAGE_TYPE_MEMORY": + return StorageType.STORAGE_TYPE_MEMORY; + + case 2: + case "STORAGE_TYPE_TRANSIENT": + return StorageType.STORAGE_TYPE_TRANSIENT; + + case 3: + case "STORAGE_TYPE_INDEX": + return StorageType.STORAGE_TYPE_INDEX; + + case 4: + case "STORAGE_TYPE_COMMITMENT": + return StorageType.STORAGE_TYPE_COMMITMENT; + + case -1: + case "UNRECOGNIZED": + default: + return StorageType.UNRECOGNIZED; + } +} +export function storageTypeToJSON(object: StorageType): string { + switch (object) { + case StorageType.STORAGE_TYPE_DEFAULT_UNSPECIFIED: + return "STORAGE_TYPE_DEFAULT_UNSPECIFIED"; + + case StorageType.STORAGE_TYPE_MEMORY: + return "STORAGE_TYPE_MEMORY"; + + case StorageType.STORAGE_TYPE_TRANSIENT: + return "STORAGE_TYPE_TRANSIENT"; + + case StorageType.STORAGE_TYPE_INDEX: + return "STORAGE_TYPE_INDEX"; + + case StorageType.STORAGE_TYPE_COMMITMENT: + return "STORAGE_TYPE_COMMITMENT"; + + case StorageType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ModuleSchemaDescriptor describe's a module's ORM schema. */ + +export interface ModuleSchemaDescriptor { + schemaFile: ModuleSchemaDescriptor_FileEntry[]; + /** + * prefix is an optional prefix that precedes all keys in this module's + * store. + */ + + prefix: Uint8Array; +} +/** ModuleSchemaDescriptor describe's a module's ORM schema. */ + +export interface ModuleSchemaDescriptorSDKType { + schema_file: ModuleSchemaDescriptor_FileEntrySDKType[]; + prefix: Uint8Array; +} +/** FileEntry describes an ORM file used in a module. */ + +export interface ModuleSchemaDescriptor_FileEntry { + /** + * id is a prefix that will be varint encoded and prepended to all the + * table keys specified in the file's tables. + */ + id: number; + /** + * proto_file_name is the name of a file .proto in that contains + * table definitions. The .proto file must be in a package that the + * module has referenced using cosmos.app.v1.ModuleDescriptor.use_package. + */ + + protoFileName: string; + /** + * storage_type optionally indicates the type of storage this file's + * tables should used. If it is left unspecified, the default KV-storage + * of the app will be used. + */ + + storageType: StorageType; +} +/** FileEntry describes an ORM file used in a module. */ + +export interface ModuleSchemaDescriptor_FileEntrySDKType { + id: number; + proto_file_name: string; + storage_type: StorageType; +} + +function createBaseModuleSchemaDescriptor(): ModuleSchemaDescriptor { + return { + schemaFile: [], + prefix: new Uint8Array() + }; +} + +export const ModuleSchemaDescriptor = { + encode(message: ModuleSchemaDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.schemaFile) { + ModuleSchemaDescriptor_FileEntry.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.prefix.length !== 0) { + writer.uint32(18).bytes(message.prefix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleSchemaDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseModuleSchemaDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.schemaFile.push(ModuleSchemaDescriptor_FileEntry.decode(reader, reader.uint32())); + break; + + case 2: + message.prefix = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleSchemaDescriptor { + const message = createBaseModuleSchemaDescriptor(); + message.schemaFile = object.schemaFile?.map(e => ModuleSchemaDescriptor_FileEntry.fromPartial(e)) || []; + message.prefix = object.prefix ?? new Uint8Array(); + return message; + } + +}; + +function createBaseModuleSchemaDescriptor_FileEntry(): ModuleSchemaDescriptor_FileEntry { + return { + id: 0, + protoFileName: "", + storageType: 0 + }; +} + +export const ModuleSchemaDescriptor_FileEntry = { + encode(message: ModuleSchemaDescriptor_FileEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).uint32(message.id); + } + + if (message.protoFileName !== "") { + writer.uint32(18).string(message.protoFileName); + } + + if (message.storageType !== 0) { + writer.uint32(24).int32(message.storageType); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleSchemaDescriptor_FileEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleSchemaDescriptor_FileEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.uint32(); + break; + + case 2: + message.protoFileName = reader.string(); + break; + + case 3: + message.storageType = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleSchemaDescriptor_FileEntry { + const message = createBaseModuleSchemaDescriptor_FileEntry(); + message.id = object.id ?? 0; + message.protoFileName = object.protoFileName ?? ""; + message.storageType = object.storageType ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/params/v1beta1/params.ts b/packages/codegen/src/cosmos/params/v1beta1/params.ts new file mode 100644 index 00000000..73eaaac5 --- /dev/null +++ b/packages/codegen/src/cosmos/params/v1beta1/params.ts @@ -0,0 +1,166 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** ParameterChangeProposal defines a proposal to change one or more parameters. */ + +export interface ParameterChangeProposal { + title: string; + description: string; + changes: ParamChange[]; +} +/** ParameterChangeProposal defines a proposal to change one or more parameters. */ + +export interface ParameterChangeProposalSDKType { + title: string; + description: string; + changes: ParamChangeSDKType[]; +} +/** + * ParamChange defines an individual parameter change, for use in + * ParameterChangeProposal. + */ + +export interface ParamChange { + subspace: string; + key: string; + value: string; +} +/** + * ParamChange defines an individual parameter change, for use in + * ParameterChangeProposal. 
+ */ + +export interface ParamChangeSDKType { + subspace: string; + key: string; + value: string; +} + +function createBaseParameterChangeProposal(): ParameterChangeProposal { + return { + title: "", + description: "", + changes: [] + }; +} + +export const ParameterChangeProposal = { + encode(message: ParameterChangeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + for (const v of message.changes) { + ParamChange.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ParameterChangeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParameterChangeProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.changes.push(ParamChange.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ParameterChangeProposal { + const message = createBaseParameterChangeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.changes = object.changes?.map(e => ParamChange.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseParamChange(): ParamChange { + return { + subspace: "", + key: "", + value: "" + }; +} + +export const ParamChange = { + encode(message: ParamChange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + + if (message.value !== "") { + writer.uint32(26).string(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ParamChange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParamChange(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + + case 2: + message.key = reader.string(); + break; + + case 3: + message.value = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ParamChange { + const message = createBaseParamChange(); + message.subspace = object.subspace ?? ""; + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/params/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/params/v1beta1/query.lcd.ts new file mode 100644 index 00000000..7feab29f --- /dev/null +++ b/packages/codegen/src/cosmos/params/v1beta1/query.lcd.ts @@ -0,0 +1,43 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QuerySubspacesRequest, QuerySubspacesResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.subspaces = this.subspaces.bind(this); + } + /* Params queries a specific parameter of a module, given its subspace and + key. */ + + + async params(params: QueryParamsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.subspace !== "undefined") { + options.params.subspace = params.subspace; + } + + if (typeof params?.key !== "undefined") { + options.params.key = params.key; + } + + const endpoint = `cosmos/params/v1beta1/params`; + return await this.req.get(endpoint, options); + } + /* Subspaces queries for all registered subspaces and all keys for a subspace. */ + + + async subspaces(_params: QuerySubspacesRequest = {}): Promise { + const endpoint = `cosmos/params/v1beta1/subspaces`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/params/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/params/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..4f505514 --- /dev/null +++ b/packages/codegen/src/cosmos/params/v1beta1/query.rpc.Query.ts @@ -0,0 +1,52 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QuerySubspacesRequest, QuerySubspacesResponse } from "./query"; +/** Query defines the gRPC querier service. */ + +export interface Query { + /** + * Params queries a specific parameter of a module, given its subspace and + * key. + */ + params(request: QueryParamsRequest): Promise; + /** Subspaces queries for all registered subspaces and all keys for a subspace. 
*/ + + subspaces(request?: QuerySubspacesRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.subspaces = this.subspaces.bind(this); + } + + params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.params.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + subspaces(request: QuerySubspacesRequest = {}): Promise { + const data = QuerySubspacesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.params.v1beta1.Query", "Subspaces", data); + return promise.then(data => QuerySubspacesResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + subspaces(request?: QuerySubspacesRequest): Promise { + return queryService.subspaces(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/params/v1beta1/query.ts b/packages/codegen/src/cosmos/params/v1beta1/query.ts new file mode 100644 index 00000000..6501d1c2 --- /dev/null +++ b/packages/codegen/src/cosmos/params/v1beta1/query.ts @@ -0,0 +1,309 @@ +import { ParamChange, ParamChangeSDKType } from "./params"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest { + /** subspace defines the module to query the parameter for. */ + subspace: string; + /** key defines the key of the parameter in the subspace. */ + + key: string; +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType { + subspace: string; + key: string; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** param defines the queried parameter. */ + param?: ParamChange; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + param?: ParamChangeSDKType; +} +/** + * QuerySubspacesRequest defines a request type for querying for all registered + * subspaces and all keys for a subspace. + */ + +export interface QuerySubspacesRequest {} +/** + * QuerySubspacesRequest defines a request type for querying for all registered + * subspaces and all keys for a subspace. + */ + +export interface QuerySubspacesRequestSDKType {} +/** + * QuerySubspacesResponse defines the response types for querying for all + * registered subspaces and all keys for a subspace. + */ + +export interface QuerySubspacesResponse { + subspaces: Subspace[]; +} +/** + * QuerySubspacesResponse defines the response types for querying for all + * registered subspaces and all keys for a subspace. + */ + +export interface QuerySubspacesResponseSDKType { + subspaces: SubspaceSDKType[]; +} +/** + * Subspace defines a parameter subspace name and all the keys that exist for + * the subspace. 
+ */ + +export interface Subspace { + subspace: string; + keys: string[]; +} +/** + * Subspace defines a parameter subspace name and all the keys that exist for + * the subspace. + */ + +export interface SubspaceSDKType { + subspace: string; + keys: string[]; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return { + subspace: "", + key: "" + }; +} + +export const QueryParamsRequest = { + encode(message: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + + case 2: + message.key = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + message.subspace = object.subspace ?? ""; + message.key = object.key ?? ""; + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + param: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.param !== undefined) { + ParamChange.encode(message.param, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.param = ParamChange.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.param = object.param !== undefined && object.param !== null ? ParamChange.fromPartial(object.param) : undefined; + return message; + } + +}; + +function createBaseQuerySubspacesRequest(): QuerySubspacesRequest { + return {}; +} + +export const QuerySubspacesRequest = { + encode(_: QuerySubspacesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySubspacesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QuerySubspacesRequest { + const message = createBaseQuerySubspacesRequest(); + return message; + } + +}; + +function createBaseQuerySubspacesResponse(): QuerySubspacesResponse { + return { + subspaces: [] + }; +} + +export const QuerySubspacesResponse = { + encode(message: QuerySubspacesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.subspaces) { + Subspace.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySubspacesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subspaces.push(Subspace.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySubspacesResponse { + const message = createBaseQuerySubspacesResponse(); + message.subspaces = object.subspaces?.map(e => Subspace.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSubspace(): Subspace { + return { + subspace: "", + keys: [] + }; +} + +export const Subspace = { + encode(message: Subspace, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + + for (const v of message.keys) { + writer.uint32(18).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Subspace { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSubspace(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + + case 2: + message.keys.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Subspace { + const message = createBaseSubspace(); + message.subspace = object.subspace ?? 
""; + message.keys = object.keys?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/rpc.query.ts b/packages/codegen/src/cosmos/rpc.query.ts new file mode 100644 index 00000000..ac1a16a8 --- /dev/null +++ b/packages/codegen/src/cosmos/rpc.query.ts @@ -0,0 +1,68 @@ +import { Tendermint34Client, HttpEndpoint } from "@cosmjs/tendermint-rpc"; +import { QueryClient } from "@cosmjs/stargate"; +export const createRPCQueryClient = async ({ + rpcEndpoint +}: { + rpcEndpoint: string | HttpEndpoint; +}) => { + const tmClient = await Tendermint34Client.connect(rpcEndpoint); + const client = new QueryClient(tmClient); + return { + cosmos: { + app: { + v1alpha1: (await import("./app/v1alpha1/query.rpc.Query")).createRpcQueryExtension(client) + }, + auth: { + v1beta1: (await import("./auth/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + authz: { + v1beta1: (await import("./authz/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + bank: { + v1beta1: (await import("./bank/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + base: { + tendermint: { + v1beta1: (await import("./base/tendermint/v1beta1/query.rpc.Service")).createRpcQueryExtension(client) + } + }, + distribution: { + v1beta1: (await import("./distribution/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + evidence: { + v1beta1: (await import("./evidence/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + feegrant: { + v1beta1: (await import("./feegrant/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + gov: { + v1: (await import("./gov/v1/query.rpc.Query")).createRpcQueryExtension(client), + v1beta1: (await import("./gov/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + group: { + v1: (await import("./group/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + mint: { + v1beta1: (await import("./mint/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + nft: { + v1beta1: (await import("./nft/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + params: { + v1beta1: (await import("./params/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + slashing: { + v1beta1: (await import("./slashing/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + staking: { + v1beta1: (await import("./staking/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + tx: { + v1beta1: (await import("./tx/v1beta1/service.rpc.Service")).createRpcQueryExtension(client) + }, + upgrade: { + v1beta1: (await import("./upgrade/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/rpc.tx.ts b/packages/codegen/src/cosmos/rpc.tx.ts new file mode 100644 index 00000000..3a4dbf9b --- /dev/null +++ b/packages/codegen/src/cosmos/rpc.tx.ts @@ -0,0 +1,49 @@ +import { Rpc } from "../helpers"; +export const createRPCMsgClient = async ({ + rpc +}: { + rpc: Rpc; +}) => ({ + cosmos: { + authz: { + v1beta1: new (await import("./authz/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + bank: { + v1beta1: new (await import("./bank/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + crisis: { + v1beta1: new (await import("./crisis/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + distribution: { + v1beta1: new (await import("./distribution/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + evidence: { + v1beta1: new (await import("./evidence/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + 
feegrant: { + v1beta1: new (await import("./feegrant/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + gov: { + v1: new (await import("./gov/v1/tx.rpc.msg")).MsgClientImpl(rpc), + v1beta1: new (await import("./gov/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + group: { + v1: new (await import("./group/v1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + nft: { + v1beta1: new (await import("./nft/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + slashing: { + v1beta1: new (await import("./slashing/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + staking: { + v1beta1: new (await import("./staking/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + upgrade: { + v1beta1: new (await import("./upgrade/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + vesting: { + v1beta1: new (await import("./vesting/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + } + } +}); \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/genesis.ts b/packages/codegen/src/cosmos/slashing/v1beta1/genesis.ts new file mode 100644 index 00000000..ef677c7b --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/genesis.ts @@ -0,0 +1,309 @@ +import { Params, ParamsSDKType, ValidatorSigningInfo, ValidatorSigningInfoSDKType } from "./slashing"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState defines the slashing module's genesis state. */ + +export interface GenesisState { + /** params defines all the paramaters of related to deposit. */ + params?: Params; + /** + * signing_infos represents a map between validator addresses and their + * signing infos. + */ + + signingInfos: SigningInfo[]; + /** + * missed_blocks represents a map between validator addresses and their + * missed blocks. + */ + + missedBlocks: ValidatorMissedBlocks[]; +} +/** GenesisState defines the slashing module's genesis state. */ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + signing_infos: SigningInfoSDKType[]; + missed_blocks: ValidatorMissedBlocksSDKType[]; +} +/** SigningInfo stores validator signing info of corresponding address. */ + +export interface SigningInfo { + /** address is the validator address. */ + address: string; + /** validator_signing_info represents the signing info of this validator. */ + + validatorSigningInfo?: ValidatorSigningInfo; +} +/** SigningInfo stores validator signing info of corresponding address. */ + +export interface SigningInfoSDKType { + address: string; + validator_signing_info?: ValidatorSigningInfoSDKType; +} +/** + * ValidatorMissedBlocks contains array of missed blocks of corresponding + * address. + */ + +export interface ValidatorMissedBlocks { + /** address is the validator address. */ + address: string; + /** missed_blocks is an array of missed blocks by the validator. */ + + missedBlocks: MissedBlock[]; +} +/** + * ValidatorMissedBlocks contains array of missed blocks of corresponding + * address. + */ + +export interface ValidatorMissedBlocksSDKType { + address: string; + missed_blocks: MissedBlockSDKType[]; +} +/** MissedBlock contains height and missed status as boolean. */ + +export interface MissedBlock { + /** index is the height at which the block was missed. */ + index: Long; + /** missed is the missed status. */ + + missed: boolean; +} +/** MissedBlock contains height and missed status as boolean. 
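A sketch of wiring the generated createRPCMsgClient (packages/codegen/src/cosmos/rpc.tx.ts) to a transport. The Rpc shape assumed here is the helpers interface used throughout the generated code (request(service, method, data) returning bytes); myRpc is a hypothetical adapter, since broadcasting Msg calls needs a backend that can actually execute transactions, which this sketch does not provide.

// Sketch, assuming Rpc = { request(service: string, method: string, data: Uint8Array): Promise<Uint8Array> }.
import { createRPCMsgClient } from "./cosmos/rpc.tx";
import { Rpc } from "./helpers"; // assumed path to the shared helpers module

const myRpc: Rpc = {
  request: async (service, method, data) => {
    // hypothetical transport, e.g. a gRPC-web bridge or a signing pipeline
    throw new Error(`no transport wired for ${service}/${method} (${data.length} bytes)`);
  }
};

const wireMsgClients = async () => {
  const clients = await createRPCMsgClient({ rpc: myRpc });
  // Example call shape (commented out because myRpc has no real transport):
  // await clients.cosmos.slashing.v1beta1.unjail({ validatorAddr: "cosmosvaloper1..." });
  return clients;
};

wireMsgClients();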
*/ + +export interface MissedBlockSDKType { + index: Long; + missed: boolean; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + signingInfos: [], + missedBlocks: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.signingInfos) { + SigningInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.missedBlocks) { + ValidatorMissedBlocks.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.signingInfos.push(SigningInfo.decode(reader, reader.uint32())); + break; + + case 3: + message.missedBlocks.push(ValidatorMissedBlocks.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.signingInfos = object.signingInfos?.map(e => SigningInfo.fromPartial(e)) || []; + message.missedBlocks = object.missedBlocks?.map(e => ValidatorMissedBlocks.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSigningInfo(): SigningInfo { + return { + address: "", + validatorSigningInfo: undefined + }; +} + +export const SigningInfo = { + encode(message: SigningInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.validatorSigningInfo !== undefined) { + ValidatorSigningInfo.encode(message.validatorSigningInfo, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SigningInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSigningInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.validatorSigningInfo = ValidatorSigningInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SigningInfo { + const message = createBaseSigningInfo(); + message.address = object.address ?? ""; + message.validatorSigningInfo = object.validatorSigningInfo !== undefined && object.validatorSigningInfo !== null ? 
ValidatorSigningInfo.fromPartial(object.validatorSigningInfo) : undefined; + return message; + } + +}; + +function createBaseValidatorMissedBlocks(): ValidatorMissedBlocks { + return { + address: "", + missedBlocks: [] + }; +} + +export const ValidatorMissedBlocks = { + encode(message: ValidatorMissedBlocks, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + for (const v of message.missedBlocks) { + MissedBlock.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorMissedBlocks { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorMissedBlocks(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.missedBlocks.push(MissedBlock.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorMissedBlocks { + const message = createBaseValidatorMissedBlocks(); + message.address = object.address ?? ""; + message.missedBlocks = object.missedBlocks?.map(e => MissedBlock.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMissedBlock(): MissedBlock { + return { + index: Long.ZERO, + missed: false + }; +} + +export const MissedBlock = { + encode(message: MissedBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.index.isZero()) { + writer.uint32(8).int64(message.index); + } + + if (message.missed === true) { + writer.uint32(16).bool(message.missed); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MissedBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMissedBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = (reader.int64() as Long); + break; + + case 2: + message.missed = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MissedBlock { + const message = createBaseMissedBlock(); + message.index = object.index !== undefined && object.index !== null ? Long.fromValue(object.index) : Long.ZERO; + message.missed = object.missed ?? 
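A round-trip sketch for the slashing GenesisState codec generated above: build a message with fromPartial, serialize it, and decode it back. The consensus address is a placeholder; Long is assumed to be re-exported from the shared helpers module as in the generated files.

// Sketch: fromPartial -> encode -> decode round trip for slashing GenesisState.
import { GenesisState } from "./cosmos/slashing/v1beta1/genesis";
import { Long } from "./helpers"; // assumed re-export, matching the generated imports

const genesis = GenesisState.fromPartial({
  missedBlocks: [{
    address: "cosmosvalcons1...", // placeholder consensus address
    missedBlocks: [{ index: Long.fromNumber(42), missed: true }]
  }]
});

const bytes = GenesisState.encode(genesis).finish();
const decoded = GenesisState.decode(bytes);
console.log(decoded.missedBlocks[0].missedBlocks[0].index.toString()); // "42"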
false; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/slashing/v1beta1/query.lcd.ts new file mode 100644 index 00000000..346fd6cc --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/query.lcd.ts @@ -0,0 +1,49 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryParamsRequest, QueryParamsResponseSDKType, QuerySigningInfoRequest, QuerySigningInfoResponseSDKType, QuerySigningInfosRequest, QuerySigningInfosResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.signingInfo = this.signingInfo.bind(this); + this.signingInfos = this.signingInfos.bind(this); + } + /* Params queries the parameters of slashing module */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/slashing/v1beta1/params`; + return await this.req.get(endpoint); + } + /* SigningInfo queries the signing info of given cons address */ + + + async signingInfo(params: QuerySigningInfoRequest): Promise { + const endpoint = `cosmos/slashing/v1beta1/signing_infos/${params.consAddress}`; + return await this.req.get(endpoint); + } + /* SigningInfos queries signing info of all validators */ + + + async signingInfos(params: QuerySigningInfosRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/slashing/v1beta1/signing_infos`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/slashing/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..94cb5ce4 --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/query.rpc.Query.ts @@ -0,0 +1,65 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryParamsRequest, QueryParamsResponse, QuerySigningInfoRequest, QuerySigningInfoResponse, QuerySigningInfosRequest, QuerySigningInfosResponse } from "./query"; +/** Query provides defines the gRPC querier service */ + +export interface Query { + /** Params queries the parameters of slashing module */ + params(request?: QueryParamsRequest): Promise; + /** SigningInfo queries the signing info of given cons address */ + + signingInfo(request: QuerySigningInfoRequest): Promise; + /** SigningInfos queries signing info of all validators */ + + signingInfos(request?: QuerySigningInfosRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.signingInfo = this.signingInfo.bind(this); + this.signingInfos = this.signingInfos.bind(this); + } + + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.slashing.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + signingInfo(request: QuerySigningInfoRequest): Promise { + 
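A sketch of hitting the slashing REST endpoints through the generated LCDQueryClient in query.lcd.ts. It assumes @osmonauts/lcd's LCDClient takes a { restEndpoint } option; the endpoint is a placeholder.

// Sketch: slashing queries over REST via the generated LCD client.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./cosmos/slashing/v1beta1/query.lcd";

const requestClient = new LCDClient({ restEndpoint: "http://localhost:1317" }); // assumed option name
const slashingLcd = new LCDQueryClient({ requestClient });

const run = async () => {
  const params = await slashingLcd.params();        // GET cosmos/slashing/v1beta1/params
  const infos = await slashingLcd.signingInfos();   // GET cosmos/slashing/v1beta1/signing_infos
  console.log(params, infos.pagination);
};
run();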
const data = QuerySigningInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.slashing.v1beta1.Query", "SigningInfo", data); + return promise.then(data => QuerySigningInfoResponse.decode(new _m0.Reader(data))); + } + + signingInfos(request: QuerySigningInfosRequest = { + pagination: undefined + }): Promise { + const data = QuerySigningInfosRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.slashing.v1beta1.Query", "SigningInfos", data); + return promise.then(data => QuerySigningInfosResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + + signingInfo(request: QuerySigningInfoRequest): Promise { + return queryService.signingInfo(request); + }, + + signingInfos(request?: QuerySigningInfosRequest): Promise { + return queryService.signingInfos(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/query.ts b/packages/codegen/src/cosmos/slashing/v1beta1/query.ts new file mode 100644 index 00000000..98dd5ea5 --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/query.ts @@ -0,0 +1,358 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Params, ParamsSDKType, ValidatorSigningInfo, ValidatorSigningInfoSDKType } from "./slashing"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** QueryParamsRequest is the request type for the Query/Params RPC method */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method */ + +export interface QueryParamsResponse { + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC + * method + */ + +export interface QuerySigningInfoRequest { + /** cons_address is the address to query signing info of */ + consAddress: string; +} +/** + * QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC + * method + */ + +export interface QuerySigningInfoRequestSDKType { + cons_address: string; +} +/** + * QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC + * method + */ + +export interface QuerySigningInfoResponse { + /** val_signing_info is the signing info of requested val cons address */ + valSigningInfo?: ValidatorSigningInfo; +} +/** + * QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC + * method + */ + +export interface QuerySigningInfoResponseSDKType { + val_signing_info?: ValidatorSigningInfoSDKType; +} +/** + * QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC + * method + */ + +export interface QuerySigningInfosRequest { + pagination?: PageRequest; +} +/** + * QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC + * method + */ + +export interface QuerySigningInfosRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QuerySigningInfosResponse is the 
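A sketch of using the slashing createRpcQueryExtension directly with @cosmjs/stargate, mirroring what rpc.query.ts wires up lazily. The endpoint and the consensus address are placeholders.

// Sketch: direct use of the generated query extension over Tendermint RPC.
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./cosmos/slashing/v1beta1/query.rpc.Query";

const run = async () => {
  const tmClient = await Tendermint34Client.connect("http://localhost:26657"); // placeholder
  const base = new QueryClient(tmClient);
  const slashing = createRpcQueryExtension(base);
  const { valSigningInfo } = await slashing.signingInfo({ consAddress: "cosmosvalcons1..." });
  console.log(valSigningInfo?.missedBlocksCounter.toString());
};
run();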
response type for the Query/SigningInfos RPC + * method + */ + +export interface QuerySigningInfosResponse { + /** info is the signing info of all validators */ + info: ValidatorSigningInfo[]; + pagination?: PageResponse; +} +/** + * QuerySigningInfosResponse is the response type for the Query/SigningInfos RPC + * method + */ + +export interface QuerySigningInfosResponseSDKType { + info: ValidatorSigningInfoSDKType[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQuerySigningInfoRequest(): QuerySigningInfoRequest { + return { + consAddress: "" + }; +} + +export const QuerySigningInfoRequest = { + encode(message: QuerySigningInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consAddress !== "") { + writer.uint32(10).string(message.consAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySigningInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySigningInfoRequest { + const message = createBaseQuerySigningInfoRequest(); + message.consAddress = object.consAddress ?? 
""; + return message; + } + +}; + +function createBaseQuerySigningInfoResponse(): QuerySigningInfoResponse { + return { + valSigningInfo: undefined + }; +} + +export const QuerySigningInfoResponse = { + encode(message: QuerySigningInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.valSigningInfo !== undefined) { + ValidatorSigningInfo.encode(message.valSigningInfo, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySigningInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.valSigningInfo = ValidatorSigningInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySigningInfoResponse { + const message = createBaseQuerySigningInfoResponse(); + message.valSigningInfo = object.valSigningInfo !== undefined && object.valSigningInfo !== null ? ValidatorSigningInfo.fromPartial(object.valSigningInfo) : undefined; + return message; + } + +}; + +function createBaseQuerySigningInfosRequest(): QuerySigningInfosRequest { + return { + pagination: undefined + }; +} + +export const QuerySigningInfosRequest = { + encode(message: QuerySigningInfosRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfosRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySigningInfosRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySigningInfosRequest { + const message = createBaseQuerySigningInfosRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQuerySigningInfosResponse(): QuerySigningInfosResponse { + return { + info: [], + pagination: undefined + }; +} + +export const QuerySigningInfosResponse = { + encode(message: QuerySigningInfosResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.info) { + ValidatorSigningInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySigningInfosResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySigningInfosResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.info.push(ValidatorSigningInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySigningInfosResponse { + const message = createBaseQuerySigningInfosResponse(); + message.info = object.info?.map(e => ValidatorSigningInfo.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/slashing.ts b/packages/codegen/src/cosmos/slashing/v1beta1/slashing.ts new file mode 100644 index 00000000..9ef07b05 --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/slashing.ts @@ -0,0 +1,248 @@ +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Long, toTimestamp, fromTimestamp, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * ValidatorSigningInfo defines a validator's signing info for monitoring their + * liveness activity. + */ + +export interface ValidatorSigningInfo { + address: string; + /** Height at which validator was first a candidate OR was unjailed */ + + startHeight: Long; + /** + * Index which is incremented each time the validator was a bonded + * in a block and may have signed a precommit or not. This in conjunction with the + * `SignedBlocksWindow` param determines the index in the `MissedBlocksBitArray`. + */ + + indexOffset: Long; + /** Timestamp until which the validator is jailed due to liveness downtime. */ + + jailedUntil?: Date; + /** + * Whether or not a validator has been tombstoned (killed out of validator set). It is set + * once the validator commits an equivocation or for any other configured misbehiavor. + */ + + tombstoned: boolean; + /** + * A counter kept to avoid unnecessary array reads. + * Note that `Sum(MissedBlocksBitArray)` always equals `MissedBlocksCounter`. + */ + + missedBlocksCounter: Long; +} +/** + * ValidatorSigningInfo defines a validator's signing info for monitoring their + * liveness activity. + */ + +export interface ValidatorSigningInfoSDKType { + address: string; + start_height: Long; + index_offset: Long; + jailed_until?: Date; + tombstoned: boolean; + missed_blocks_counter: Long; +} +/** Params represents the parameters used for by the slashing module. */ + +export interface Params { + signedBlocksWindow: Long; + minSignedPerWindow: Uint8Array; + downtimeJailDuration?: Duration; + slashFractionDoubleSign: Uint8Array; + slashFractionDowntime: Uint8Array; +} +/** Params represents the parameters used for by the slashing module. 
*/ + +export interface ParamsSDKType { + signed_blocks_window: Long; + min_signed_per_window: Uint8Array; + downtime_jail_duration?: DurationSDKType; + slash_fraction_double_sign: Uint8Array; + slash_fraction_downtime: Uint8Array; +} + +function createBaseValidatorSigningInfo(): ValidatorSigningInfo { + return { + address: "", + startHeight: Long.ZERO, + indexOffset: Long.ZERO, + jailedUntil: undefined, + tombstoned: false, + missedBlocksCounter: Long.ZERO + }; +} + +export const ValidatorSigningInfo = { + encode(message: ValidatorSigningInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (!message.startHeight.isZero()) { + writer.uint32(16).int64(message.startHeight); + } + + if (!message.indexOffset.isZero()) { + writer.uint32(24).int64(message.indexOffset); + } + + if (message.jailedUntil !== undefined) { + Timestamp.encode(toTimestamp(message.jailedUntil), writer.uint32(34).fork()).ldelim(); + } + + if (message.tombstoned === true) { + writer.uint32(40).bool(message.tombstoned); + } + + if (!message.missedBlocksCounter.isZero()) { + writer.uint32(48).int64(message.missedBlocksCounter); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSigningInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSigningInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.startHeight = (reader.int64() as Long); + break; + + case 3: + message.indexOffset = (reader.int64() as Long); + break; + + case 4: + message.jailedUntil = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 5: + message.tombstoned = reader.bool(); + break; + + case 6: + message.missedBlocksCounter = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorSigningInfo { + const message = createBaseValidatorSigningInfo(); + message.address = object.address ?? ""; + message.startHeight = object.startHeight !== undefined && object.startHeight !== null ? Long.fromValue(object.startHeight) : Long.ZERO; + message.indexOffset = object.indexOffset !== undefined && object.indexOffset !== null ? Long.fromValue(object.indexOffset) : Long.ZERO; + message.jailedUntil = object.jailedUntil ?? undefined; + message.tombstoned = object.tombstoned ?? false; + message.missedBlocksCounter = object.missedBlocksCounter !== undefined && object.missedBlocksCounter !== null ? 
Long.fromValue(object.missedBlocksCounter) : Long.ZERO; + return message; + } + +}; + +function createBaseParams(): Params { + return { + signedBlocksWindow: Long.ZERO, + minSignedPerWindow: new Uint8Array(), + downtimeJailDuration: undefined, + slashFractionDoubleSign: new Uint8Array(), + slashFractionDowntime: new Uint8Array() + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.signedBlocksWindow.isZero()) { + writer.uint32(8).int64(message.signedBlocksWindow); + } + + if (message.minSignedPerWindow.length !== 0) { + writer.uint32(18).bytes(message.minSignedPerWindow); + } + + if (message.downtimeJailDuration !== undefined) { + Duration.encode(message.downtimeJailDuration, writer.uint32(26).fork()).ldelim(); + } + + if (message.slashFractionDoubleSign.length !== 0) { + writer.uint32(34).bytes(message.slashFractionDoubleSign); + } + + if (message.slashFractionDowntime.length !== 0) { + writer.uint32(42).bytes(message.slashFractionDowntime); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signedBlocksWindow = (reader.int64() as Long); + break; + + case 2: + message.minSignedPerWindow = reader.bytes(); + break; + + case 3: + message.downtimeJailDuration = Duration.decode(reader, reader.uint32()); + break; + + case 4: + message.slashFractionDoubleSign = reader.bytes(); + break; + + case 5: + message.slashFractionDowntime = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.signedBlocksWindow = object.signedBlocksWindow !== undefined && object.signedBlocksWindow !== null ? Long.fromValue(object.signedBlocksWindow) : Long.ZERO; + message.minSignedPerWindow = object.minSignedPerWindow ?? new Uint8Array(); + message.downtimeJailDuration = object.downtimeJailDuration !== undefined && object.downtimeJailDuration !== null ? Duration.fromPartial(object.downtimeJailDuration) : undefined; + message.slashFractionDoubleSign = object.slashFractionDoubleSign ?? new Uint8Array(); + message.slashFractionDowntime = object.slashFractionDowntime ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/slashing/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..622712b3 --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,28 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgUnjail, MsgUnjailResponse } from "./tx"; +/** Msg defines the slashing Msg service. */ + +export interface Msg { + /** + * Unjail defines a method for unjailing a jailed validator, thus returning + * them into the bonded validator set, so they can begin receiving provisions + * and rewards again. 
+ */ + unjail(request: MsgUnjail): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.unjail = this.unjail.bind(this); + } + + unjail(request: MsgUnjail): Promise { + const data = MsgUnjail.encode(request).finish(); + const promise = this.rpc.request("cosmos.slashing.v1beta1.Msg", "Unjail", data); + return promise.then(data => MsgUnjailResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/slashing/v1beta1/tx.ts b/packages/codegen/src/cosmos/slashing/v1beta1/tx.ts new file mode 100644 index 00000000..2a7e57ef --- /dev/null +++ b/packages/codegen/src/cosmos/slashing/v1beta1/tx.ts @@ -0,0 +1,97 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** MsgUnjail defines the Msg/Unjail request type */ + +export interface MsgUnjail { + validatorAddr: string; +} +/** MsgUnjail defines the Msg/Unjail request type */ + +export interface MsgUnjailSDKType { + validator_addr: string; +} +/** MsgUnjailResponse defines the Msg/Unjail response type */ + +export interface MsgUnjailResponse {} +/** MsgUnjailResponse defines the Msg/Unjail response type */ + +export interface MsgUnjailResponseSDKType {} + +function createBaseMsgUnjail(): MsgUnjail { + return { + validatorAddr: "" + }; +} + +export const MsgUnjail = { + encode(message: MsgUnjail, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddr !== "") { + writer.uint32(10).string(message.validatorAddr); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnjail { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUnjail(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddr = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUnjail { + const message = createBaseMsgUnjail(); + message.validatorAddr = object.validatorAddr ?? ""; + return message; + } + +}; + +function createBaseMsgUnjailResponse(): MsgUnjailResponse { + return {}; +} + +export const MsgUnjailResponse = { + encode(_: MsgUnjailResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUnjailResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
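A sketch of preparing a MsgUnjail for a transaction using the codec above. The type URL is the conventional one for this proto package (an assumption, since no registry is shown in this diff), and the validator address is a placeholder.

// Sketch: encode MsgUnjail and wrap it in a { typeUrl, value } envelope.
import { MsgUnjail } from "./cosmos/slashing/v1beta1/tx";

const msg = MsgUnjail.fromPartial({ validatorAddr: "cosmosvaloper1..." }); // placeholder address
const anyMsg = {
  typeUrl: "/cosmos.slashing.v1beta1.MsgUnjail", // assumed conventional type URL
  value: MsgUnjail.encode(msg).finish()
};
// `anyMsg` can then be handed to a signing client that accepts { typeUrl, value } messages.
console.log(anyMsg.value.length);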
reader.len : reader.pos + length; + const message = createBaseMsgUnjailResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUnjailResponse { + const message = createBaseMsgUnjailResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/authz.ts b/packages/codegen/src/cosmos/staking/v1beta1/authz.ts new file mode 100644 index 00000000..4be02b69 --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/authz.ts @@ -0,0 +1,234 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * AuthorizationType defines the type of staking module authorization type + * + * Since: cosmos-sdk 0.43 + */ + +export enum AuthorizationType { + /** AUTHORIZATION_TYPE_UNSPECIFIED - AUTHORIZATION_TYPE_UNSPECIFIED specifies an unknown authorization type */ + AUTHORIZATION_TYPE_UNSPECIFIED = 0, + + /** AUTHORIZATION_TYPE_DELEGATE - AUTHORIZATION_TYPE_DELEGATE defines an authorization type for Msg/Delegate */ + AUTHORIZATION_TYPE_DELEGATE = 1, + + /** AUTHORIZATION_TYPE_UNDELEGATE - AUTHORIZATION_TYPE_UNDELEGATE defines an authorization type for Msg/Undelegate */ + AUTHORIZATION_TYPE_UNDELEGATE = 2, + + /** AUTHORIZATION_TYPE_REDELEGATE - AUTHORIZATION_TYPE_REDELEGATE defines an authorization type for Msg/BeginRedelegate */ + AUTHORIZATION_TYPE_REDELEGATE = 3, + UNRECOGNIZED = -1, +} +export const AuthorizationTypeSDKType = AuthorizationType; +export function authorizationTypeFromJSON(object: any): AuthorizationType { + switch (object) { + case 0: + case "AUTHORIZATION_TYPE_UNSPECIFIED": + return AuthorizationType.AUTHORIZATION_TYPE_UNSPECIFIED; + + case 1: + case "AUTHORIZATION_TYPE_DELEGATE": + return AuthorizationType.AUTHORIZATION_TYPE_DELEGATE; + + case 2: + case "AUTHORIZATION_TYPE_UNDELEGATE": + return AuthorizationType.AUTHORIZATION_TYPE_UNDELEGATE; + + case 3: + case "AUTHORIZATION_TYPE_REDELEGATE": + return AuthorizationType.AUTHORIZATION_TYPE_REDELEGATE; + + case -1: + case "UNRECOGNIZED": + default: + return AuthorizationType.UNRECOGNIZED; + } +} +export function authorizationTypeToJSON(object: AuthorizationType): string { + switch (object) { + case AuthorizationType.AUTHORIZATION_TYPE_UNSPECIFIED: + return "AUTHORIZATION_TYPE_UNSPECIFIED"; + + case AuthorizationType.AUTHORIZATION_TYPE_DELEGATE: + return "AUTHORIZATION_TYPE_DELEGATE"; + + case AuthorizationType.AUTHORIZATION_TYPE_UNDELEGATE: + return "AUTHORIZATION_TYPE_UNDELEGATE"; + + case AuthorizationType.AUTHORIZATION_TYPE_REDELEGATE: + return "AUTHORIZATION_TYPE_REDELEGATE"; + + case AuthorizationType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * StakeAuthorization defines authorization for delegate/undelegate/redelegate. + * + * Since: cosmos-sdk 0.43 + */ + +export interface StakeAuthorization { + /** + * max_tokens specifies the maximum amount of tokens can be delegate to a validator. If it is + * empty, there is no spend limit and any amount of coins can be delegated. + */ + maxTokens?: Coin; + /** + * allow_list specifies list of validator addresses to whom grantee can delegate tokens on behalf of granter's + * account. + */ + + allowList?: StakeAuthorization_Validators; + /** deny_list specifies list of validator addresses to whom grantee can not delegate tokens. 
*/ + + denyList?: StakeAuthorization_Validators; + /** authorization_type defines one of AuthorizationType. */ + + authorizationType: AuthorizationType; +} +/** + * StakeAuthorization defines authorization for delegate/undelegate/redelegate. + * + * Since: cosmos-sdk 0.43 + */ + +export interface StakeAuthorizationSDKType { + max_tokens?: CoinSDKType; + allow_list?: StakeAuthorization_ValidatorsSDKType; + deny_list?: StakeAuthorization_ValidatorsSDKType; + authorization_type: AuthorizationType; +} +/** Validators defines list of validator addresses. */ + +export interface StakeAuthorization_Validators { + address: string[]; +} +/** Validators defines list of validator addresses. */ + +export interface StakeAuthorization_ValidatorsSDKType { + address: string[]; +} + +function createBaseStakeAuthorization(): StakeAuthorization { + return { + maxTokens: undefined, + allowList: undefined, + denyList: undefined, + authorizationType: 0 + }; +} + +export const StakeAuthorization = { + encode(message: StakeAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxTokens !== undefined) { + Coin.encode(message.maxTokens, writer.uint32(10).fork()).ldelim(); + } + + if (message.allowList !== undefined) { + StakeAuthorization_Validators.encode(message.allowList, writer.uint32(18).fork()).ldelim(); + } + + if (message.denyList !== undefined) { + StakeAuthorization_Validators.encode(message.denyList, writer.uint32(26).fork()).ldelim(); + } + + if (message.authorizationType !== 0) { + writer.uint32(32).int32(message.authorizationType); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StakeAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStakeAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxTokens = Coin.decode(reader, reader.uint32()); + break; + + case 2: + message.allowList = StakeAuthorization_Validators.decode(reader, reader.uint32()); + break; + + case 3: + message.denyList = StakeAuthorization_Validators.decode(reader, reader.uint32()); + break; + + case 4: + message.authorizationType = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StakeAuthorization { + const message = createBaseStakeAuthorization(); + message.maxTokens = object.maxTokens !== undefined && object.maxTokens !== null ? Coin.fromPartial(object.maxTokens) : undefined; + message.allowList = object.allowList !== undefined && object.allowList !== null ? StakeAuthorization_Validators.fromPartial(object.allowList) : undefined; + message.denyList = object.denyList !== undefined && object.denyList !== null ? StakeAuthorization_Validators.fromPartial(object.denyList) : undefined; + message.authorizationType = object.authorizationType ?? 
0; + return message; + } + +}; + +function createBaseStakeAuthorization_Validators(): StakeAuthorization_Validators { + return { + address: [] + }; +} + +export const StakeAuthorization_Validators = { + encode(message: StakeAuthorization_Validators, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.address) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StakeAuthorization_Validators { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStakeAuthorization_Validators(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StakeAuthorization_Validators { + const message = createBaseStakeAuthorization_Validators(); + message.address = object.address?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/genesis.ts b/packages/codegen/src/cosmos/staking/v1beta1/genesis.ts new file mode 100644 index 00000000..30badf95 --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/genesis.ts @@ -0,0 +1,231 @@ +import { Params, ParamsSDKType, Validator, ValidatorSDKType, Delegation, DelegationSDKType, UnbondingDelegation, UnbondingDelegationSDKType, Redelegation, RedelegationSDKType } from "./staking"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState defines the staking module's genesis state. */ + +export interface GenesisState { + /** params defines all the paramaters of related to deposit. */ + params?: Params; + /** + * last_total_power tracks the total amounts of bonded tokens recorded during + * the previous end block. + */ + + lastTotalPower: Uint8Array; + /** + * last_validator_powers is a special index that provides a historical list + * of the last-block's bonded validators. + */ + + lastValidatorPowers: LastValidatorPower[]; + /** delegations defines the validator set at genesis. */ + + validators: Validator[]; + /** delegations defines the delegations active at genesis. */ + + delegations: Delegation[]; + /** unbonding_delegations defines the unbonding delegations active at genesis. */ + + unbondingDelegations: UnbondingDelegation[]; + /** redelegations defines the redelegations active at genesis. */ + + redelegations: Redelegation[]; + exported: boolean; +} +/** GenesisState defines the staking module's genesis state. */ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + last_total_power: Uint8Array; + last_validator_powers: LastValidatorPowerSDKType[]; + validators: ValidatorSDKType[]; + delegations: DelegationSDKType[]; + unbonding_delegations: UnbondingDelegationSDKType[]; + redelegations: RedelegationSDKType[]; + exported: boolean; +} +/** LastValidatorPower required for validator set update logic. */ + +export interface LastValidatorPower { + /** address is the address of the validator. */ + address: string; + /** power defines the power of the validator. */ + + power: Long; +} +/** LastValidatorPower required for validator set update logic. 
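A sketch of building a delegate-only StakeAuthorization restricted to a single validator with the codec generated above, then serializing it. The denom and validator address are placeholders.

// Sketch: StakeAuthorization limited to AUTHORIZATION_TYPE_DELEGATE with an allow list.
import { StakeAuthorization, AuthorizationType } from "./cosmos/staking/v1beta1/authz";

const auth = StakeAuthorization.fromPartial({
  maxTokens: { denom: "uatom", amount: "1000000" },       // placeholder spend limit
  allowList: { address: ["cosmosvaloper1..."] },          // placeholder validator
  authorizationType: AuthorizationType.AUTHORIZATION_TYPE_DELEGATE
});
const bytes = StakeAuthorization.encode(auth).finish();
console.log(bytes.length);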
*/ + +export interface LastValidatorPowerSDKType { + address: string; + power: Long; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + lastTotalPower: new Uint8Array(), + lastValidatorPowers: [], + validators: [], + delegations: [], + unbondingDelegations: [], + redelegations: [], + exported: false + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + if (message.lastTotalPower.length !== 0) { + writer.uint32(18).bytes(message.lastTotalPower); + } + + for (const v of message.lastValidatorPowers) { + LastValidatorPower.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.delegations) { + Delegation.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + for (const v of message.unbondingDelegations) { + UnbondingDelegation.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.redelegations) { + Redelegation.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.exported === true) { + writer.uint32(64).bool(message.exported); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.lastTotalPower = reader.bytes(); + break; + + case 3: + message.lastValidatorPowers.push(LastValidatorPower.decode(reader, reader.uint32())); + break; + + case 4: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 5: + message.delegations.push(Delegation.decode(reader, reader.uint32())); + break; + + case 6: + message.unbondingDelegations.push(UnbondingDelegation.decode(reader, reader.uint32())); + break; + + case 7: + message.redelegations.push(Redelegation.decode(reader, reader.uint32())); + break; + + case 8: + message.exported = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.lastTotalPower = object.lastTotalPower ?? new Uint8Array(); + message.lastValidatorPowers = object.lastValidatorPowers?.map(e => LastValidatorPower.fromPartial(e)) || []; + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.delegations = object.delegations?.map(e => Delegation.fromPartial(e)) || []; + message.unbondingDelegations = object.unbondingDelegations?.map(e => UnbondingDelegation.fromPartial(e)) || []; + message.redelegations = object.redelegations?.map(e => Redelegation.fromPartial(e)) || []; + message.exported = object.exported ?? 
false; + return message; + } + +}; + +function createBaseLastValidatorPower(): LastValidatorPower { + return { + address: "", + power: Long.ZERO + }; +} + +export const LastValidatorPower = { + encode(message: LastValidatorPower, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (!message.power.isZero()) { + writer.uint32(16).int64(message.power); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LastValidatorPower { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseLastValidatorPower(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.power = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LastValidatorPower { + const message = createBaseLastValidatorPower(); + message.address = object.address ?? ""; + message.power = object.power !== undefined && object.power !== null ? Long.fromValue(object.power) : Long.ZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/staking/v1beta1/query.lcd.ts new file mode 100644 index 00000000..b404abb4 --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/query.lcd.ts @@ -0,0 +1,199 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryValidatorsRequest, QueryValidatorsResponseSDKType, QueryValidatorRequest, QueryValidatorResponseSDKType, QueryValidatorDelegationsRequest, QueryValidatorDelegationsResponseSDKType, QueryValidatorUnbondingDelegationsRequest, QueryValidatorUnbondingDelegationsResponseSDKType, QueryDelegationRequest, QueryDelegationResponseSDKType, QueryUnbondingDelegationRequest, QueryUnbondingDelegationResponseSDKType, QueryDelegatorDelegationsRequest, QueryDelegatorDelegationsResponseSDKType, QueryDelegatorUnbondingDelegationsRequest, QueryDelegatorUnbondingDelegationsResponseSDKType, QueryRedelegationsRequest, QueryRedelegationsResponseSDKType, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponseSDKType, QueryDelegatorValidatorRequest, QueryDelegatorValidatorResponseSDKType, QueryHistoricalInfoRequest, QueryHistoricalInfoResponseSDKType, QueryPoolRequest, QueryPoolResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.validators = this.validators.bind(this); + this.validator = this.validator.bind(this); + this.validatorDelegations = this.validatorDelegations.bind(this); + this.validatorUnbondingDelegations = this.validatorUnbondingDelegations.bind(this); + this.delegation = this.delegation.bind(this); + this.unbondingDelegation = this.unbondingDelegation.bind(this); + this.delegatorDelegations = this.delegatorDelegations.bind(this); + this.delegatorUnbondingDelegations = this.delegatorUnbondingDelegations.bind(this); + this.redelegations = this.redelegations.bind(this); + this.delegatorValidators = this.delegatorValidators.bind(this); + this.delegatorValidator = 
this.delegatorValidator.bind(this); + this.historicalInfo = this.historicalInfo.bind(this); + this.pool = this.pool.bind(this); + this.params = this.params.bind(this); + } + /* Validators queries all validators that match the given status. */ + + + async validators(params: QueryValidatorsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.status !== "undefined") { + options.params.status = params.status; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/validators`; + return await this.req.get(endpoint, options); + } + /* Validator queries validator info for given validator address. */ + + + async validator(params: QueryValidatorRequest): Promise { + const endpoint = `cosmos/staking/v1beta1/validators/${params.validatorAddr}`; + return await this.req.get(endpoint); + } + /* ValidatorDelegations queries delegate info for given validator. */ + + + async validatorDelegations(params: QueryValidatorDelegationsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/validators/${params.validatorAddr}/delegations`; + return await this.req.get(endpoint, options); + } + /* ValidatorUnbondingDelegations queries unbonding delegations of a validator. */ + + + async validatorUnbondingDelegations(params: QueryValidatorUnbondingDelegationsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/validators/${params.validatorAddr}/unbonding_delegations`; + return await this.req.get(endpoint, options); + } + /* Delegation queries delegate info for given validator delegator pair. */ + + + async delegation(params: QueryDelegationRequest): Promise { + const endpoint = `cosmos/staking/v1beta1/validators/${params.validatorAddr}/delegations/${params.delegatorAddr}`; + return await this.req.get(endpoint); + } + /* UnbondingDelegation queries unbonding info for given validator delegator + pair. */ + + + async unbondingDelegation(params: QueryUnbondingDelegationRequest): Promise { + const endpoint = `cosmos/staking/v1beta1/validators/${params.validatorAddr}/delegations/${params.delegatorAddr}/unbonding_delegation`; + return await this.req.get(endpoint); + } + /* DelegatorDelegations queries all delegations of a given delegator address. */ + + + async delegatorDelegations(params: QueryDelegatorDelegationsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/delegations/${params.delegatorAddr}`; + return await this.req.get(endpoint, options); + } + /* DelegatorUnbondingDelegations queries all unbonding delegations of a given + delegator address. */ + + + async delegatorUnbondingDelegations(params: QueryDelegatorUnbondingDelegationsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/delegators/${params.delegatorAddr}/unbonding_delegations`; + return await this.req.get(endpoint, options); + } + /* Redelegations queries redelegations of given address. 
*/ + + + async redelegations(params: QueryRedelegationsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.srcValidatorAddr !== "undefined") { + options.params.src_validator_addr = params.srcValidatorAddr; + } + + if (typeof params?.dstValidatorAddr !== "undefined") { + options.params.dst_validator_addr = params.dstValidatorAddr; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/delegators/${params.delegatorAddr}/redelegations`; + return await this.req.get(endpoint, options); + } + /* DelegatorValidators queries all validators info for given delegator + address. */ + + + async delegatorValidators(params: QueryDelegatorValidatorsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/staking/v1beta1/delegators/${params.delegatorAddr}/validators`; + return await this.req.get(endpoint, options); + } + /* DelegatorValidator queries validator info for given delegator validator + pair. */ + + + async delegatorValidator(params: QueryDelegatorValidatorRequest): Promise { + const endpoint = `cosmos/staking/v1beta1/delegators/${params.delegatorAddr}/validators/${params.validatorAddr}`; + return await this.req.get(endpoint); + } + /* HistoricalInfo queries the historical info for given height. */ + + + async historicalInfo(params: QueryHistoricalInfoRequest): Promise { + const endpoint = `cosmos/staking/v1beta1/historical_info/${params.height}`; + return await this.req.get(endpoint); + } + /* Pool queries the pool info. */ + + + async pool(_params: QueryPoolRequest = {}): Promise { + const endpoint = `cosmos/staking/v1beta1/pool`; + return await this.req.get(endpoint); + } + /* Parameters queries the staking parameters. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/staking/v1beta1/params`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/staking/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..550f592f --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/query.rpc.Query.ts @@ -0,0 +1,229 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryValidatorsRequest, QueryValidatorsResponse, QueryValidatorRequest, QueryValidatorResponse, QueryValidatorDelegationsRequest, QueryValidatorDelegationsResponse, QueryValidatorUnbondingDelegationsRequest, QueryValidatorUnbondingDelegationsResponse, QueryDelegationRequest, QueryDelegationResponse, QueryUnbondingDelegationRequest, QueryUnbondingDelegationResponse, QueryDelegatorDelegationsRequest, QueryDelegatorDelegationsResponse, QueryDelegatorUnbondingDelegationsRequest, QueryDelegatorUnbondingDelegationsResponse, QueryRedelegationsRequest, QueryRedelegationsResponse, QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponse, QueryDelegatorValidatorRequest, QueryDelegatorValidatorResponse, QueryHistoricalInfoRequest, QueryHistoricalInfoResponse, QueryPoolRequest, QueryPoolResponse, QueryParamsRequest, QueryParamsResponse } from "./query"; +/** Query defines the gRPC querier service. 
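A sketch of listing bonded validators over REST with the generated staking LCDQueryClient above. As before, it assumes @osmonauts/lcd's LCDClient accepts { restEndpoint }; the endpoint and status string are illustrative.

// Sketch: staking validators query via the generated LCD client.
import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./cosmos/staking/v1beta1/query.lcd";

const stakingLcd = new LCDQueryClient({
  requestClient: new LCDClient({ restEndpoint: "http://localhost:1317" }) // assumed option name
});

const run = async () => {
  const res = await stakingLcd.validators({
    status: "BOND_STATUS_BONDED", // standard bond status filter
    pagination: undefined
  });
  console.log(res.validators.length);
};
run();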
*/ + +export interface Query { + /** Validators queries all validators that match the given status. */ + validators(request: QueryValidatorsRequest): Promise<QueryValidatorsResponse>; + /** Validator queries validator info for given validator address. */ + + validator(request: QueryValidatorRequest): Promise<QueryValidatorResponse>; + /** ValidatorDelegations queries delegate info for given validator. */ + + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise<QueryValidatorDelegationsResponse>; + /** ValidatorUnbondingDelegations queries unbonding delegations of a validator. */ + + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise<QueryValidatorUnbondingDelegationsResponse>; + /** Delegation queries delegate info for given validator delegator pair. */ + + delegation(request: QueryDelegationRequest): Promise<QueryDelegationResponse>; + /** + * UnbondingDelegation queries unbonding info for given validator delegator + * pair. + */ + + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise<QueryUnbondingDelegationResponse>; + /** DelegatorDelegations queries all delegations of a given delegator address. */ + + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise<QueryDelegatorDelegationsResponse>; + /** + * DelegatorUnbondingDelegations queries all unbonding delegations of a given + * delegator address. + */ + + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise<QueryDelegatorUnbondingDelegationsResponse>; + /** Redelegations queries redelegations of given address. */ + + redelegations(request: QueryRedelegationsRequest): Promise<QueryRedelegationsResponse>; + /** + * DelegatorValidators queries all validators info for given delegator + * address. + */ + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise<QueryDelegatorValidatorsResponse>; + /** + * DelegatorValidator queries validator info for given delegator validator + * pair. + */ + + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise<QueryDelegatorValidatorResponse>; + /** HistoricalInfo queries the historical info for given height. */ + + historicalInfo(request: QueryHistoricalInfoRequest): Promise<QueryHistoricalInfoResponse>; + /** Pool queries the pool info. */ + + pool(request?: QueryPoolRequest): Promise<QueryPoolResponse>; + /** Parameters queries the staking parameters.
*/ + + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.validators = this.validators.bind(this); + this.validator = this.validator.bind(this); + this.validatorDelegations = this.validatorDelegations.bind(this); + this.validatorUnbondingDelegations = this.validatorUnbondingDelegations.bind(this); + this.delegation = this.delegation.bind(this); + this.unbondingDelegation = this.unbondingDelegation.bind(this); + this.delegatorDelegations = this.delegatorDelegations.bind(this); + this.delegatorUnbondingDelegations = this.delegatorUnbondingDelegations.bind(this); + this.redelegations = this.redelegations.bind(this); + this.delegatorValidators = this.delegatorValidators.bind(this); + this.delegatorValidator = this.delegatorValidator.bind(this); + this.historicalInfo = this.historicalInfo.bind(this); + this.pool = this.pool.bind(this); + this.params = this.params.bind(this); + } + + validators(request: QueryValidatorsRequest): Promise<QueryValidatorsResponse> { + const data = QueryValidatorsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Validators", data); + return promise.then(data => QueryValidatorsResponse.decode(new _m0.Reader(data))); + } + + validator(request: QueryValidatorRequest): Promise<QueryValidatorResponse> { + const data = QueryValidatorRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Validator", data); + return promise.then(data => QueryValidatorResponse.decode(new _m0.Reader(data))); + } + + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise<QueryValidatorDelegationsResponse> { + const data = QueryValidatorDelegationsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "ValidatorDelegations", data); + return promise.then(data => QueryValidatorDelegationsResponse.decode(new _m0.Reader(data))); + } + + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise<QueryValidatorUnbondingDelegationsResponse> { + const data = QueryValidatorUnbondingDelegationsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "ValidatorUnbondingDelegations", data); + return promise.then(data => QueryValidatorUnbondingDelegationsResponse.decode(new _m0.Reader(data))); + } + + delegation(request: QueryDelegationRequest): Promise<QueryDelegationResponse> { + const data = QueryDelegationRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Delegation", data); + return promise.then(data => QueryDelegationResponse.decode(new _m0.Reader(data))); + } + + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise<QueryUnbondingDelegationResponse> { + const data = QueryUnbondingDelegationRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "UnbondingDelegation", data); + return promise.then(data => QueryUnbondingDelegationResponse.decode(new _m0.Reader(data))); + } + + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise<QueryDelegatorDelegationsResponse> { + const data = QueryDelegatorDelegationsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "DelegatorDelegations", data); + return promise.then(data => QueryDelegatorDelegationsResponse.decode(new _m0.Reader(data))); + } + + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise<QueryDelegatorUnbondingDelegationsResponse> { + const data = QueryDelegatorUnbondingDelegationsRequest.encode(request).finish(); + const promise = 
this.rpc.request("cosmos.staking.v1beta1.Query", "DelegatorUnbondingDelegations", data); + return promise.then(data => QueryDelegatorUnbondingDelegationsResponse.decode(new _m0.Reader(data))); + } + + redelegations(request: QueryRedelegationsRequest): Promise<QueryRedelegationsResponse> { + const data = QueryRedelegationsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Redelegations", data); + return promise.then(data => QueryRedelegationsResponse.decode(new _m0.Reader(data))); + } + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise<QueryDelegatorValidatorsResponse> { + const data = QueryDelegatorValidatorsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "DelegatorValidators", data); + return promise.then(data => QueryDelegatorValidatorsResponse.decode(new _m0.Reader(data))); + } + + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise<QueryDelegatorValidatorResponse> { + const data = QueryDelegatorValidatorRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "DelegatorValidator", data); + return promise.then(data => QueryDelegatorValidatorResponse.decode(new _m0.Reader(data))); + } + + historicalInfo(request: QueryHistoricalInfoRequest): Promise<QueryHistoricalInfoResponse> { + const data = QueryHistoricalInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "HistoricalInfo", data); + return promise.then(data => QueryHistoricalInfoResponse.decode(new _m0.Reader(data))); + } + + pool(request: QueryPoolRequest = {}): Promise<QueryPoolResponse> { + const data = QueryPoolRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Pool", data); + return promise.then(data => QueryPoolResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest = {}): Promise<QueryParamsResponse> { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + validators(request: QueryValidatorsRequest): Promise<QueryValidatorsResponse> { + return queryService.validators(request); + }, + + validator(request: QueryValidatorRequest): Promise<QueryValidatorResponse> { + return queryService.validator(request); + }, + + validatorDelegations(request: QueryValidatorDelegationsRequest): Promise<QueryValidatorDelegationsResponse> { + return queryService.validatorDelegations(request); + }, + + validatorUnbondingDelegations(request: QueryValidatorUnbondingDelegationsRequest): Promise<QueryValidatorUnbondingDelegationsResponse> { + return queryService.validatorUnbondingDelegations(request); + }, + + delegation(request: QueryDelegationRequest): Promise<QueryDelegationResponse> { + return queryService.delegation(request); + }, + + unbondingDelegation(request: QueryUnbondingDelegationRequest): Promise<QueryUnbondingDelegationResponse> { + return queryService.unbondingDelegation(request); + }, + + delegatorDelegations(request: QueryDelegatorDelegationsRequest): Promise<QueryDelegatorDelegationsResponse> { + return queryService.delegatorDelegations(request); + }, + + delegatorUnbondingDelegations(request: QueryDelegatorUnbondingDelegationsRequest): Promise<QueryDelegatorUnbondingDelegationsResponse> { + return queryService.delegatorUnbondingDelegations(request); + }, + + redelegations(request: QueryRedelegationsRequest): Promise<QueryRedelegationsResponse> { + return queryService.redelegations(request); + }, + + delegatorValidators(request: QueryDelegatorValidatorsRequest): Promise<QueryDelegatorValidatorsResponse> { + return queryService.delegatorValidators(request); + },
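A minimal usage sketch for the generated createRpcQueryExtension and QueryClientImpl above, assuming a reachable Tendermint RPC endpoint and recent @cosmjs/stargate and @cosmjs/tendermint-rpc packages. The endpoint URL and the choice of Tendermint34Client are placeholders (newer cosmjs releases may prefer Tendermint37Client or a Comet client); this sketch is illustrative and is not part of the generated diff:

import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./query.rpc.Query";

async function listBondedValidators(): Promise<void> {
  // Placeholder endpoint: any Tendermint/CometBFT RPC URL for a Cosmos SDK chain.
  const tmClient = await Tendermint34Client.connect("http://localhost:26657");

  // QueryClient.withExtensions merges the methods returned by createRpcQueryExtension
  // directly onto the resulting client, so the staking queries become top-level calls.
  const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);

  // QueryValidatorsRequest requires `status`; `pagination` is optional.
  // "BOND_STATUS_BONDED" matches the BondStatus enum emitted in staking.ts below.
  const { validators } = await client.validators({ status: "BOND_STATUS_BONDED" });
  console.log(`bonded validators: ${validators.length}`);
}

The LCD client in query.lcd.ts above exposes the same queries over a chain's REST API; in Telescope's output those methods typically resolve to the snake_case SDKType response shapes rather than the camelCase types used by the RPC client shown here.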
+ + delegatorValidator(request: QueryDelegatorValidatorRequest): Promise { + return queryService.delegatorValidator(request); + }, + + historicalInfo(request: QueryHistoricalInfoRequest): Promise { + return queryService.historicalInfo(request); + }, + + pool(request?: QueryPoolRequest): Promise { + return queryService.pool(request); + }, + + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/query.ts b/packages/codegen/src/cosmos/staking/v1beta1/query.ts new file mode 100644 index 00000000..cd53260d --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/query.ts @@ -0,0 +1,1910 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { Validator, ValidatorSDKType, DelegationResponse, DelegationResponseSDKType, UnbondingDelegation, UnbondingDelegationSDKType, RedelegationResponse, RedelegationResponseSDKType, HistoricalInfo, HistoricalInfoSDKType, Pool, PoolSDKType, Params, ParamsSDKType } from "./staking"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** QueryValidatorsRequest is request type for Query/Validators RPC method. */ + +export interface QueryValidatorsRequest { + /** status enables to query for validators matching a given status. */ + status: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** QueryValidatorsRequest is request type for Query/Validators RPC method. */ + +export interface QueryValidatorsRequestSDKType { + status: string; + pagination?: PageRequestSDKType; +} +/** QueryValidatorsResponse is response type for the Query/Validators RPC method */ + +export interface QueryValidatorsResponse { + /** validators contains all the queried validators. */ + validators: Validator[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryValidatorsResponse is response type for the Query/Validators RPC method */ + +export interface QueryValidatorsResponseSDKType { + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryValidatorRequest is response type for the Query/Validator RPC method */ + +export interface QueryValidatorRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; +} +/** QueryValidatorRequest is response type for the Query/Validator RPC method */ + +export interface QueryValidatorRequestSDKType { + validator_addr: string; +} +/** QueryValidatorResponse is response type for the Query/Validator RPC method */ + +export interface QueryValidatorResponse { + /** validator defines the the validator info. */ + validator?: Validator; +} +/** QueryValidatorResponse is response type for the Query/Validator RPC method */ + +export interface QueryValidatorResponseSDKType { + validator?: ValidatorSDKType; +} +/** + * QueryValidatorDelegationsRequest is request type for the + * Query/ValidatorDelegations RPC method + */ + +export interface QueryValidatorDelegationsRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; + /** pagination defines an optional pagination for the request. 
*/ + + pagination?: PageRequest; +} +/** + * QueryValidatorDelegationsRequest is request type for the + * Query/ValidatorDelegations RPC method + */ + +export interface QueryValidatorDelegationsRequestSDKType { + validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorDelegationsResponse is response type for the + * Query/ValidatorDelegations RPC method + */ + +export interface QueryValidatorDelegationsResponse { + delegationResponses: DelegationResponse[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryValidatorDelegationsResponse is response type for the + * Query/ValidatorDelegations RPC method + */ + +export interface QueryValidatorDelegationsResponseSDKType { + delegation_responses: DelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryValidatorUnbondingDelegationsRequest is required type for the + * Query/ValidatorUnbondingDelegations RPC method + */ + +export interface QueryValidatorUnbondingDelegationsRequest { + /** validator_addr defines the validator address to query for. */ + validatorAddr: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryValidatorUnbondingDelegationsRequest is required type for the + * Query/ValidatorUnbondingDelegations RPC method + */ + +export interface QueryValidatorUnbondingDelegationsRequestSDKType { + validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryValidatorUnbondingDelegationsResponse is response type for the + * Query/ValidatorUnbondingDelegations RPC method. + */ + +export interface QueryValidatorUnbondingDelegationsResponse { + unbondingResponses: UnbondingDelegation[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryValidatorUnbondingDelegationsResponse is response type for the + * Query/ValidatorUnbondingDelegations RPC method. + */ + +export interface QueryValidatorUnbondingDelegationsResponseSDKType { + unbonding_responses: UnbondingDelegationSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryDelegationRequest is request type for the Query/Delegation RPC method. */ + +export interface QueryDelegationRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. */ + + validatorAddr: string; +} +/** QueryDelegationRequest is request type for the Query/Delegation RPC method. */ + +export interface QueryDelegationRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** QueryDelegationResponse is response type for the Query/Delegation RPC method. */ + +export interface QueryDelegationResponse { + /** delegation_responses defines the delegation info of a delegation. */ + delegationResponse?: DelegationResponse; +} +/** QueryDelegationResponse is response type for the Query/Delegation RPC method. */ + +export interface QueryDelegationResponseSDKType { + delegation_response?: DelegationResponseSDKType; +} +/** + * QueryUnbondingDelegationRequest is request type for the + * Query/UnbondingDelegation RPC method. + */ + +export interface QueryUnbondingDelegationRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. 
*/ + + validatorAddr: string; +} +/** + * QueryUnbondingDelegationRequest is request type for the + * Query/UnbondingDelegation RPC method. + */ + +export interface QueryUnbondingDelegationRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** + * QueryDelegationResponse is response type for the Query/UnbondingDelegation + * RPC method. + */ + +export interface QueryUnbondingDelegationResponse { + /** unbond defines the unbonding information of a delegation. */ + unbond?: UnbondingDelegation; +} +/** + * QueryDelegationResponse is response type for the Query/UnbondingDelegation + * RPC method. + */ + +export interface QueryUnbondingDelegationResponseSDKType { + unbond?: UnbondingDelegationSDKType; +} +/** + * QueryDelegatorDelegationsRequest is request type for the + * Query/DelegatorDelegations RPC method. + */ + +export interface QueryDelegatorDelegationsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryDelegatorDelegationsRequest is request type for the + * Query/DelegatorDelegations RPC method. + */ + +export interface QueryDelegatorDelegationsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryDelegatorDelegationsResponse is response type for the + * Query/DelegatorDelegations RPC method. + */ + +export interface QueryDelegatorDelegationsResponse { + /** delegation_responses defines all the delegations' info of a delegator. */ + delegationResponses: DelegationResponse[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryDelegatorDelegationsResponse is response type for the + * Query/DelegatorDelegations RPC method. + */ + +export interface QueryDelegatorDelegationsResponseSDKType { + delegation_responses: DelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorUnbondingDelegationsRequest is request type for the + * Query/DelegatorUnbondingDelegations RPC method. + */ + +export interface QueryDelegatorUnbondingDelegationsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryDelegatorUnbondingDelegationsRequest is request type for the + * Query/DelegatorUnbondingDelegations RPC method. + */ + +export interface QueryDelegatorUnbondingDelegationsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryUnbondingDelegatorDelegationsResponse is response type for the + * Query/UnbondingDelegatorDelegations RPC method. + */ + +export interface QueryDelegatorUnbondingDelegationsResponse { + unbondingResponses: UnbondingDelegation[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryUnbondingDelegatorDelegationsResponse is response type for the + * Query/UnbondingDelegatorDelegations RPC method. + */ + +export interface QueryDelegatorUnbondingDelegationsResponseSDKType { + unbonding_responses: UnbondingDelegationSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryRedelegationsRequest is request type for the Query/Redelegations RPC + * method. + */ + +export interface QueryRedelegationsRequest { + /** delegator_addr defines the delegator address to query for. 
*/ + delegatorAddr: string; + /** src_validator_addr defines the validator address to redelegate from. */ + + srcValidatorAddr: string; + /** dst_validator_addr defines the validator address to redelegate to. */ + + dstValidatorAddr: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryRedelegationsRequest is request type for the Query/Redelegations RPC + * method. + */ + +export interface QueryRedelegationsRequestSDKType { + delegator_addr: string; + src_validator_addr: string; + dst_validator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryRedelegationsResponse is response type for the Query/Redelegations RPC + * method. + */ + +export interface QueryRedelegationsResponse { + redelegationResponses: RedelegationResponse[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryRedelegationsResponse is response type for the Query/Redelegations RPC + * method. + */ + +export interface QueryRedelegationsResponseSDKType { + redelegation_responses: RedelegationResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorValidatorsRequest is request type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryDelegatorValidatorsRequest is request type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsRequestSDKType { + delegator_addr: string; + pagination?: PageRequestSDKType; +} +/** + * QueryDelegatorValidatorsResponse is response type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsResponse { + /** validators defines the the validators' info of a delegator. */ + validators: Validator[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryDelegatorValidatorsResponse is response type for the + * Query/DelegatorValidators RPC method. + */ + +export interface QueryDelegatorValidatorsResponseSDKType { + validators: ValidatorSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryDelegatorValidatorRequest is request type for the + * Query/DelegatorValidator RPC method. + */ + +export interface QueryDelegatorValidatorRequest { + /** delegator_addr defines the delegator address to query for. */ + delegatorAddr: string; + /** validator_addr defines the validator address to query for. */ + + validatorAddr: string; +} +/** + * QueryDelegatorValidatorRequest is request type for the + * Query/DelegatorValidator RPC method. + */ + +export interface QueryDelegatorValidatorRequestSDKType { + delegator_addr: string; + validator_addr: string; +} +/** + * QueryDelegatorValidatorResponse response type for the + * Query/DelegatorValidator RPC method. + */ + +export interface QueryDelegatorValidatorResponse { + /** validator defines the the validator info. */ + validator?: Validator; +} +/** + * QueryDelegatorValidatorResponse response type for the + * Query/DelegatorValidator RPC method. + */ + +export interface QueryDelegatorValidatorResponseSDKType { + validator?: ValidatorSDKType; +} +/** + * QueryHistoricalInfoRequest is request type for the Query/HistoricalInfo RPC + * method. 
+ */ + +export interface QueryHistoricalInfoRequest { + /** height defines at which height to query the historical info. */ + height: Long; +} +/** + * QueryHistoricalInfoRequest is request type for the Query/HistoricalInfo RPC + * method. + */ + +export interface QueryHistoricalInfoRequestSDKType { + height: Long; +} +/** + * QueryHistoricalInfoResponse is response type for the Query/HistoricalInfo RPC + * method. + */ + +export interface QueryHistoricalInfoResponse { + /** hist defines the historical info at the given height. */ + hist?: HistoricalInfo; +} +/** + * QueryHistoricalInfoResponse is response type for the Query/HistoricalInfo RPC + * method. + */ + +export interface QueryHistoricalInfoResponseSDKType { + hist?: HistoricalInfoSDKType; +} +/** QueryPoolRequest is request type for the Query/Pool RPC method. */ + +export interface QueryPoolRequest {} +/** QueryPoolRequest is request type for the Query/Pool RPC method. */ + +export interface QueryPoolRequestSDKType {} +/** QueryPoolResponse is response type for the Query/Pool RPC method. */ + +export interface QueryPoolResponse { + /** pool defines the pool info. */ + pool?: Pool; +} +/** QueryPoolResponse is response type for the Query/Pool RPC method. */ + +export interface QueryPoolResponseSDKType { + pool?: PoolSDKType; +} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params holds all the parameters of this module. */ + params?: Params; +} +/** QueryParamsResponse is response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} + +function createBaseQueryValidatorsRequest(): QueryValidatorsRequest { + return { + status: "", + pagination: undefined + }; +} + +export const QueryValidatorsRequest = { + encode(message: QueryValidatorsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== "") { + writer.uint32(10).string(message.status); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.status = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorsRequest { + const message = createBaseQueryValidatorsRequest(); + message.status = object.status ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorsResponse(): QueryValidatorsResponse { + return { + validators: [], + pagination: undefined + }; +} + +export const QueryValidatorsResponse = { + encode(message: QueryValidatorsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorsResponse { + const message = createBaseQueryValidatorsResponse(); + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorRequest(): QueryValidatorRequest { + return { + validatorAddr: "" + }; +} + +export const QueryValidatorRequest = { + encode(message: QueryValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddr !== "") { + writer.uint32(10).string(message.validatorAddr); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddr = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorRequest { + const message = createBaseQueryValidatorRequest(); + message.validatorAddr = object.validatorAddr ?? ""; + return message; + } + +}; + +function createBaseQueryValidatorResponse(): QueryValidatorResponse { + return { + validator: undefined + }; +} + +export const QueryValidatorResponse = { + encode(message: QueryValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryValidatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorResponse { + const message = createBaseQueryValidatorResponse(); + message.validator = object.validator !== undefined && object.validator !== null ? Validator.fromPartial(object.validator) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorDelegationsRequest(): QueryValidatorDelegationsRequest { + return { + validatorAddr: "", + pagination: undefined + }; +} + +export const QueryValidatorDelegationsRequest = { + encode(message: QueryValidatorDelegationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddr !== "") { + writer.uint32(10).string(message.validatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorDelegationsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorDelegationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddr = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorDelegationsRequest { + const message = createBaseQueryValidatorDelegationsRequest(); + message.validatorAddr = object.validatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorDelegationsResponse(): QueryValidatorDelegationsResponse { + return { + delegationResponses: [], + pagination: undefined + }; +} + +export const QueryValidatorDelegationsResponse = { + encode(message: QueryValidatorDelegationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.delegationResponses) { + DelegationResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorDelegationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryValidatorDelegationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegationResponses.push(DelegationResponse.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorDelegationsResponse { + const message = createBaseQueryValidatorDelegationsResponse(); + message.delegationResponses = object.delegationResponses?.map(e => DelegationResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorUnbondingDelegationsRequest(): QueryValidatorUnbondingDelegationsRequest { + return { + validatorAddr: "", + pagination: undefined + }; +} + +export const QueryValidatorUnbondingDelegationsRequest = { + encode(message: QueryValidatorUnbondingDelegationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validatorAddr !== "") { + writer.uint32(10).string(message.validatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorUnbondingDelegationsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryValidatorUnbondingDelegationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorAddr = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorUnbondingDelegationsRequest { + const message = createBaseQueryValidatorUnbondingDelegationsRequest(); + message.validatorAddr = object.validatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryValidatorUnbondingDelegationsResponse(): QueryValidatorUnbondingDelegationsResponse { + return { + unbondingResponses: [], + pagination: undefined + }; +} + +export const QueryValidatorUnbondingDelegationsResponse = { + encode(message: QueryValidatorUnbondingDelegationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.unbondingResponses) { + UnbondingDelegation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryValidatorUnbondingDelegationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryValidatorUnbondingDelegationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.unbondingResponses.push(UnbondingDelegation.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryValidatorUnbondingDelegationsResponse { + const message = createBaseQueryValidatorUnbondingDelegationsResponse(); + message.unbondingResponses = object.unbondingResponses?.map(e => UnbondingDelegation.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegationRequest(): QueryDelegationRequest { + return { + delegatorAddr: "", + validatorAddr: "" + }; +} + +export const QueryDelegationRequest = { + encode(message: QueryDelegationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.validatorAddr !== "") { + writer.uint32(18).string(message.validatorAddr); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.validatorAddr = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationRequest { + const message = createBaseQueryDelegationRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.validatorAddr = object.validatorAddr ?? ""; + return message; + } + +}; + +function createBaseQueryDelegationResponse(): QueryDelegationResponse { + return { + delegationResponse: undefined + }; +} + +export const QueryDelegationResponse = { + encode(message: QueryDelegationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegationResponse !== undefined) { + DelegationResponse.encode(message.delegationResponse, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegationResponse = DelegationResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegationResponse { + const message = createBaseQueryDelegationResponse(); + message.delegationResponse = object.delegationResponse !== undefined && object.delegationResponse !== null ? 
DelegationResponse.fromPartial(object.delegationResponse) : undefined; + return message; + } + +}; + +function createBaseQueryUnbondingDelegationRequest(): QueryUnbondingDelegationRequest { + return { + delegatorAddr: "", + validatorAddr: "" + }; +} + +export const QueryUnbondingDelegationRequest = { + encode(message: QueryUnbondingDelegationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.validatorAddr !== "") { + writer.uint32(18).string(message.validatorAddr); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnbondingDelegationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnbondingDelegationRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.validatorAddr = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnbondingDelegationRequest { + const message = createBaseQueryUnbondingDelegationRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.validatorAddr = object.validatorAddr ?? ""; + return message; + } + +}; + +function createBaseQueryUnbondingDelegationResponse(): QueryUnbondingDelegationResponse { + return { + unbond: undefined + }; +} + +export const QueryUnbondingDelegationResponse = { + encode(message: QueryUnbondingDelegationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.unbond !== undefined) { + UnbondingDelegation.encode(message.unbond, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnbondingDelegationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnbondingDelegationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.unbond = UnbondingDelegation.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnbondingDelegationResponse { + const message = createBaseQueryUnbondingDelegationResponse(); + message.unbond = object.unbond !== undefined && object.unbond !== null ? UnbondingDelegation.fromPartial(object.unbond) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorDelegationsRequest(): QueryDelegatorDelegationsRequest { + return { + delegatorAddr: "", + pagination: undefined + }; +} + +export const QueryDelegatorDelegationsRequest = { + encode(message: QueryDelegatorDelegationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorDelegationsRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorDelegationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorDelegationsRequest { + const message = createBaseQueryDelegatorDelegationsRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorDelegationsResponse(): QueryDelegatorDelegationsResponse { + return { + delegationResponses: [], + pagination: undefined + }; +} + +export const QueryDelegatorDelegationsResponse = { + encode(message: QueryDelegatorDelegationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.delegationResponses) { + DelegationResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorDelegationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorDelegationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegationResponses.push(DelegationResponse.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorDelegationsResponse { + const message = createBaseQueryDelegatorDelegationsResponse(); + message.delegationResponses = object.delegationResponses?.map(e => DelegationResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorUnbondingDelegationsRequest(): QueryDelegatorUnbondingDelegationsRequest { + return { + delegatorAddr: "", + pagination: undefined + }; +} + +export const QueryDelegatorUnbondingDelegationsRequest = { + encode(message: QueryDelegatorUnbondingDelegationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorUnbondingDelegationsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDelegatorUnbondingDelegationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorUnbondingDelegationsRequest { + const message = createBaseQueryDelegatorUnbondingDelegationsRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorUnbondingDelegationsResponse(): QueryDelegatorUnbondingDelegationsResponse { + return { + unbondingResponses: [], + pagination: undefined + }; +} + +export const QueryDelegatorUnbondingDelegationsResponse = { + encode(message: QueryDelegatorUnbondingDelegationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.unbondingResponses) { + UnbondingDelegation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorUnbondingDelegationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorUnbondingDelegationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.unbondingResponses.push(UnbondingDelegation.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorUnbondingDelegationsResponse { + const message = createBaseQueryDelegatorUnbondingDelegationsResponse(); + message.unbondingResponses = object.unbondingResponses?.map(e => UnbondingDelegation.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryRedelegationsRequest(): QueryRedelegationsRequest { + return { + delegatorAddr: "", + srcValidatorAddr: "", + dstValidatorAddr: "", + pagination: undefined + }; +} + +export const QueryRedelegationsRequest = { + encode(message: QueryRedelegationsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.srcValidatorAddr !== "") { + writer.uint32(18).string(message.srcValidatorAddr); + } + + if (message.dstValidatorAddr !== "") { + writer.uint32(26).string(message.dstValidatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRedelegationsRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryRedelegationsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.srcValidatorAddr = reader.string(); + break; + + case 3: + message.dstValidatorAddr = reader.string(); + break; + + case 4: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryRedelegationsRequest { + const message = createBaseQueryRedelegationsRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.srcValidatorAddr = object.srcValidatorAddr ?? ""; + message.dstValidatorAddr = object.dstValidatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryRedelegationsResponse(): QueryRedelegationsResponse { + return { + redelegationResponses: [], + pagination: undefined + }; +} + +export const QueryRedelegationsResponse = { + encode(message: QueryRedelegationsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.redelegationResponses) { + RedelegationResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRedelegationsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryRedelegationsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.redelegationResponses.push(RedelegationResponse.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryRedelegationsResponse { + const message = createBaseQueryRedelegationsResponse(); + message.redelegationResponses = object.redelegationResponses?.map(e => RedelegationResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorsRequest(): QueryDelegatorValidatorsRequest { + return { + delegatorAddr: "", + pagination: undefined + }; +} + +export const QueryDelegatorValidatorsRequest = { + encode(message: QueryDelegatorValidatorsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorsRequest { + const message = createBaseQueryDelegatorValidatorsRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorsResponse(): QueryDelegatorValidatorsResponse { + return { + validators: [], + pagination: undefined + }; +} + +export const QueryDelegatorValidatorsResponse = { + encode(message: QueryDelegatorValidatorsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorsResponse { + const message = createBaseQueryDelegatorValidatorsResponse(); + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorRequest(): QueryDelegatorValidatorRequest { + return { + delegatorAddr: "", + validatorAddr: "" + }; +} + +export const QueryDelegatorValidatorRequest = { + encode(message: QueryDelegatorValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddr !== "") { + writer.uint32(10).string(message.delegatorAddr); + } + + if (message.validatorAddr !== "") { + writer.uint32(18).string(message.validatorAddr); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddr = reader.string(); + break; + + case 2: + message.validatorAddr = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorRequest { + const message = createBaseQueryDelegatorValidatorRequest(); + message.delegatorAddr = object.delegatorAddr ?? ""; + message.validatorAddr = object.validatorAddr ?? ""; + return message; + } + +}; + +function createBaseQueryDelegatorValidatorResponse(): QueryDelegatorValidatorResponse { + return { + validator: undefined + }; +} + +export const QueryDelegatorValidatorResponse = { + encode(message: QueryDelegatorValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDelegatorValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDelegatorValidatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDelegatorValidatorResponse { + const message = createBaseQueryDelegatorValidatorResponse(); + message.validator = object.validator !== undefined && object.validator !== null ? Validator.fromPartial(object.validator) : undefined; + return message; + } + +}; + +function createBaseQueryHistoricalInfoRequest(): QueryHistoricalInfoRequest { + return { + height: Long.ZERO + }; +} + +export const QueryHistoricalInfoRequest = { + encode(message: QueryHistoricalInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryHistoricalInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryHistoricalInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryHistoricalInfoRequest { + const message = createBaseQueryHistoricalInfoRequest(); + message.height = object.height !== undefined && object.height !== null ? 
Long.fromValue(object.height) : Long.ZERO; + return message; + } + +}; + +function createBaseQueryHistoricalInfoResponse(): QueryHistoricalInfoResponse { + return { + hist: undefined + }; +} + +export const QueryHistoricalInfoResponse = { + encode(message: QueryHistoricalInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hist !== undefined) { + HistoricalInfo.encode(message.hist, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryHistoricalInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryHistoricalInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hist = HistoricalInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryHistoricalInfoResponse { + const message = createBaseQueryHistoricalInfoResponse(); + message.hist = object.hist !== undefined && object.hist !== null ? HistoricalInfo.fromPartial(object.hist) : undefined; + return message; + } + +}; + +function createBaseQueryPoolRequest(): QueryPoolRequest { + return {}; +} + +export const QueryPoolRequest = { + encode(_: QueryPoolRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPoolRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPoolRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryPoolRequest { + const message = createBaseQueryPoolRequest(); + return message; + } + +}; + +function createBaseQueryPoolResponse(): QueryPoolResponse { + return { + pool: undefined + }; +} + +export const QueryPoolResponse = { + encode(message: QueryPoolResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pool !== undefined) { + Pool.encode(message.pool, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPoolResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPoolResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pool = Pool.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPoolResponse { + const message = createBaseQueryPoolResponse(); + message.pool = object.pool !== undefined && object.pool !== null ? 
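// --- Editorial usage sketch; not part of the generated diff above/below. ---
// int64 fields such as `height` are carried as Long values; `fromPartial`
// normalizes them with `Long.fromValue`, as shown above. `Long` is a named
// export of the generated helpers module (see the staking.ts imports below);
// both import paths here are assumed.
import { QueryHistoricalInfoRequest } from "./query";
import { Long } from "../../../helpers";

const histReq = QueryHistoricalInfoRequest.fromPartial({
  height: Long.fromValue(1234567), // block height as a 64-bit integer
});
const histBytes = QueryHistoricalInfoRequest.encode(histReq).finish();
console.log(histBytes.byteLength);
// --- end sketch ---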
Pool.fromPartial(object.pool) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/staking.ts b/packages/codegen/src/cosmos/staking/v1beta1/staking.ts new file mode 100644 index 00000000..902f9529 --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/staking.ts @@ -0,0 +1,1863 @@ +import { Header, HeaderSDKType } from "../../../tendermint/types/types"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Duration, DurationSDKType } from "../../../google/protobuf/duration"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, toTimestamp, fromTimestamp, Long } from "../../../helpers"; +/** BondStatus is the status of a validator. */ + +export enum BondStatus { + /** BOND_STATUS_UNSPECIFIED - UNSPECIFIED defines an invalid validator status. */ + BOND_STATUS_UNSPECIFIED = 0, + + /** BOND_STATUS_UNBONDED - UNBONDED defines a validator that is not bonded. */ + BOND_STATUS_UNBONDED = 1, + + /** BOND_STATUS_UNBONDING - UNBONDING defines a validator that is unbonding. */ + BOND_STATUS_UNBONDING = 2, + + /** BOND_STATUS_BONDED - BONDED defines a validator that is bonded. 
*/ + BOND_STATUS_BONDED = 3, + UNRECOGNIZED = -1, +} +export const BondStatusSDKType = BondStatus; +export function bondStatusFromJSON(object: any): BondStatus { + switch (object) { + case 0: + case "BOND_STATUS_UNSPECIFIED": + return BondStatus.BOND_STATUS_UNSPECIFIED; + + case 1: + case "BOND_STATUS_UNBONDED": + return BondStatus.BOND_STATUS_UNBONDED; + + case 2: + case "BOND_STATUS_UNBONDING": + return BondStatus.BOND_STATUS_UNBONDING; + + case 3: + case "BOND_STATUS_BONDED": + return BondStatus.BOND_STATUS_BONDED; + + case -1: + case "UNRECOGNIZED": + default: + return BondStatus.UNRECOGNIZED; + } +} +export function bondStatusToJSON(object: BondStatus): string { + switch (object) { + case BondStatus.BOND_STATUS_UNSPECIFIED: + return "BOND_STATUS_UNSPECIFIED"; + + case BondStatus.BOND_STATUS_UNBONDED: + return "BOND_STATUS_UNBONDED"; + + case BondStatus.BOND_STATUS_UNBONDING: + return "BOND_STATUS_UNBONDING"; + + case BondStatus.BOND_STATUS_BONDED: + return "BOND_STATUS_BONDED"; + + case BondStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * HistoricalInfo contains header and validator information for a given block. + * It is stored as part of staking module's state, which persists the `n` most + * recent HistoricalInfo + * (`n` is set by the staking module's `historical_entries` parameter). + */ + +export interface HistoricalInfo { + header?: Header; + valset: Validator[]; +} +/** + * HistoricalInfo contains header and validator information for a given block. + * It is stored as part of staking module's state, which persists the `n` most + * recent HistoricalInfo + * (`n` is set by the staking module's `historical_entries` parameter). + */ + +export interface HistoricalInfoSDKType { + header?: HeaderSDKType; + valset: ValidatorSDKType[]; +} +/** + * CommissionRates defines the initial commission rates to be used for creating + * a validator. + */ + +export interface CommissionRates { + /** rate is the commission rate charged to delegators, as a fraction. */ + rate: string; + /** max_rate defines the maximum commission rate which validator can ever charge, as a fraction. */ + + maxRate: string; + /** max_change_rate defines the maximum daily increase of the validator commission, as a fraction. */ + + maxChangeRate: string; +} +/** + * CommissionRates defines the initial commission rates to be used for creating + * a validator. + */ + +export interface CommissionRatesSDKType { + rate: string; + max_rate: string; + max_change_rate: string; +} +/** Commission defines commission parameters for a given validator. */ + +export interface Commission { + /** commission_rates defines the initial commission rates to be used for creating a validator. */ + commissionRates?: CommissionRates; + /** update_time is the last time the commission rate was changed. */ + + updateTime?: Date; +} +/** Commission defines commission parameters for a given validator. */ + +export interface CommissionSDKType { + commission_rates?: CommissionRatesSDKType; + update_time?: Date; +} +/** Description defines a validator description. */ + +export interface Description { + /** moniker defines a human-readable name for the validator. */ + moniker: string; + /** identity defines an optional identity signature (ex. UPort or Keybase). */ + + identity: string; + /** website defines an optional website link. */ + + website: string; + /** security_contact defines an optional email for security contact. */ + + securityContact: string; + /** details define other optional details. 
*/ + + details: string; +} +/** Description defines a validator description. */ + +export interface DescriptionSDKType { + moniker: string; + identity: string; + website: string; + security_contact: string; + details: string; +} +/** + * Validator defines a validator, together with the total amount of the + * Validator's bond shares and their exchange rate to coins. Slashing results in + * a decrease in the exchange rate, allowing correct calculation of future + * undelegations without iterating over delegators. When coins are delegated to + * this validator, the validator is credited with a delegation whose number of + * bond shares is based on the amount of coins delegated divided by the current + * exchange rate. Voting power can be calculated as total bonded shares + * multiplied by exchange rate. + */ + +export interface Validator { + /** operator_address defines the address of the validator's operator; bech encoded in JSON. */ + operatorAddress: string; + /** consensus_pubkey is the consensus public key of the validator, as a Protobuf Any. */ + + consensusPubkey?: Any; + /** jailed defined whether the validator has been jailed from bonded status or not. */ + + jailed: boolean; + /** status is the validator status (bonded/unbonding/unbonded). */ + + status: BondStatus; + /** tokens define the delegated tokens (incl. self-delegation). */ + + tokens: string; + /** delegator_shares defines total shares issued to a validator's delegators. */ + + delegatorShares: string; + /** description defines the description terms for the validator. */ + + description?: Description; + /** unbonding_height defines, if unbonding, the height at which this validator has begun unbonding. */ + + unbondingHeight: Long; + /** unbonding_time defines, if unbonding, the min time for the validator to complete unbonding. */ + + unbondingTime?: Date; + /** commission defines the commission parameters. */ + + commission?: Commission; + /** min_self_delegation is the validator's self declared minimum self delegation. */ + + minSelfDelegation: string; +} +/** + * Validator defines a validator, together with the total amount of the + * Validator's bond shares and their exchange rate to coins. Slashing results in + * a decrease in the exchange rate, allowing correct calculation of future + * undelegations without iterating over delegators. When coins are delegated to + * this validator, the validator is credited with a delegation whose number of + * bond shares is based on the amount of coins delegated divided by the current + * exchange rate. Voting power can be calculated as total bonded shares + * multiplied by exchange rate. + */ + +export interface ValidatorSDKType { + operator_address: string; + consensus_pubkey?: AnySDKType; + jailed: boolean; + status: BondStatus; + tokens: string; + delegator_shares: string; + description?: DescriptionSDKType; + unbonding_height: Long; + unbonding_time?: Date; + commission?: CommissionSDKType; + min_self_delegation: string; +} +/** ValAddresses defines a repeated set of validator addresses. */ + +export interface ValAddresses { + addresses: string[]; +} +/** ValAddresses defines a repeated set of validator addresses. */ + +export interface ValAddressesSDKType { + addresses: string[]; +} +/** + * DVPair is struct that just has a delegator-validator pair with no other data. + * It is intended to be used as a marshalable pointer. For example, a DVPair can + * be used to construct the key to getting an UnbondingDelegation from state. 
+ */ + +export interface DVPair { + delegatorAddress: string; + validatorAddress: string; +} +/** + * DVPair is struct that just has a delegator-validator pair with no other data. + * It is intended to be used as a marshalable pointer. For example, a DVPair can + * be used to construct the key to getting an UnbondingDelegation from state. + */ + +export interface DVPairSDKType { + delegator_address: string; + validator_address: string; +} +/** DVPairs defines an array of DVPair objects. */ + +export interface DVPairs { + pairs: DVPair[]; +} +/** DVPairs defines an array of DVPair objects. */ + +export interface DVPairsSDKType { + pairs: DVPairSDKType[]; +} +/** + * DVVTriplet is struct that just has a delegator-validator-validator triplet + * with no other data. It is intended to be used as a marshalable pointer. For + * example, a DVVTriplet can be used to construct the key to getting a + * Redelegation from state. + */ + +export interface DVVTriplet { + delegatorAddress: string; + validatorSrcAddress: string; + validatorDstAddress: string; +} +/** + * DVVTriplet is struct that just has a delegator-validator-validator triplet + * with no other data. It is intended to be used as a marshalable pointer. For + * example, a DVVTriplet can be used to construct the key to getting a + * Redelegation from state. + */ + +export interface DVVTripletSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; +} +/** DVVTriplets defines an array of DVVTriplet objects. */ + +export interface DVVTriplets { + triplets: DVVTriplet[]; +} +/** DVVTriplets defines an array of DVVTriplet objects. */ + +export interface DVVTripletsSDKType { + triplets: DVVTripletSDKType[]; +} +/** + * Delegation represents the bond with tokens held by an account. It is + * owned by one delegator, and is associated with the voting power of one + * validator. + */ + +export interface Delegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_address is the bech32-encoded address of the validator. */ + + validatorAddress: string; + /** shares define the delegation shares received. */ + + shares: string; +} +/** + * Delegation represents the bond with tokens held by an account. It is + * owned by one delegator, and is associated with the voting power of one + * validator. + */ + +export interface DelegationSDKType { + delegator_address: string; + validator_address: string; + shares: string; +} +/** + * UnbondingDelegation stores all of a single delegator's unbonding bonds + * for a single validator in an time-ordered list. + */ + +export interface UnbondingDelegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_address is the bech32-encoded address of the validator. */ + + validatorAddress: string; + /** entries are the unbonding delegation entries. */ + + entries: UnbondingDelegationEntry[]; +} +/** + * UnbondingDelegation stores all of a single delegator's unbonding bonds + * for a single validator in an time-ordered list. + */ + +export interface UnbondingDelegationSDKType { + delegator_address: string; + validator_address: string; + entries: UnbondingDelegationEntrySDKType[]; +} +/** UnbondingDelegationEntry defines an unbonding object with relevant metadata. */ + +export interface UnbondingDelegationEntry { + /** creation_height is the height which the unbonding took place. 
*/ + creationHeight: Long; + /** completion_time is the unix time for unbonding completion. */ + + completionTime?: Date; + /** initial_balance defines the tokens initially scheduled to receive at completion. */ + + initialBalance: string; + /** balance defines the tokens to receive at completion. */ + + balance: string; +} +/** UnbondingDelegationEntry defines an unbonding object with relevant metadata. */ + +export interface UnbondingDelegationEntrySDKType { + creation_height: Long; + completion_time?: Date; + initial_balance: string; + balance: string; +} +/** RedelegationEntry defines a redelegation object with relevant metadata. */ + +export interface RedelegationEntry { + /** creation_height defines the height which the redelegation took place. */ + creationHeight: Long; + /** completion_time defines the unix time for redelegation completion. */ + + completionTime?: Date; + /** initial_balance defines the initial balance when redelegation started. */ + + initialBalance: string; + /** shares_dst is the amount of destination-validator shares created by redelegation. */ + + sharesDst: string; +} +/** RedelegationEntry defines a redelegation object with relevant metadata. */ + +export interface RedelegationEntrySDKType { + creation_height: Long; + completion_time?: Date; + initial_balance: string; + shares_dst: string; +} +/** + * Redelegation contains the list of a particular delegator's redelegating bonds + * from a particular source validator to a particular destination validator. + */ + +export interface Redelegation { + /** delegator_address is the bech32-encoded address of the delegator. */ + delegatorAddress: string; + /** validator_src_address is the validator redelegation source operator address. */ + + validatorSrcAddress: string; + /** validator_dst_address is the validator redelegation destination operator address. */ + + validatorDstAddress: string; + /** entries are the redelegation entries. */ + + entries: RedelegationEntry[]; +} +/** + * Redelegation contains the list of a particular delegator's redelegating bonds + * from a particular source validator to a particular destination validator. + */ + +export interface RedelegationSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; + entries: RedelegationEntrySDKType[]; +} +/** Params defines the parameters for the staking module. */ + +export interface Params { + /** unbonding_time is the time duration of unbonding. */ + unbondingTime?: Duration; + /** max_validators is the maximum number of validators. */ + + maxValidators: number; + /** max_entries is the max entries for either unbonding delegation or redelegation (per pair/trio). */ + + maxEntries: number; + /** historical_entries is the number of historical entries to persist. */ + + historicalEntries: number; + /** bond_denom defines the bondable coin denomination. */ + + bondDenom: string; + /** min_commission_rate is the chain-wide minimum commission rate that a validator can charge their delegators */ + + minCommissionRate: string; +} +/** Params defines the parameters for the staking module. */ + +export interface ParamsSDKType { + unbonding_time?: DurationSDKType; + max_validators: number; + max_entries: number; + historical_entries: number; + bond_denom: string; + min_commission_rate: string; +} +/** + * DelegationResponse is equivalent to Delegation except that it contains a + * balance in addition to shares which is more suitable for client responses. 
+ */ + +export interface DelegationResponse { + delegation?: Delegation; + balance?: Coin; +} +/** + * DelegationResponse is equivalent to Delegation except that it contains a + * balance in addition to shares which is more suitable for client responses. + */ + +export interface DelegationResponseSDKType { + delegation?: DelegationSDKType; + balance?: CoinSDKType; +} +/** + * RedelegationEntryResponse is equivalent to a RedelegationEntry except that it + * contains a balance in addition to shares which is more suitable for client + * responses. + */ + +export interface RedelegationEntryResponse { + redelegationEntry?: RedelegationEntry; + balance: string; +} +/** + * RedelegationEntryResponse is equivalent to a RedelegationEntry except that it + * contains a balance in addition to shares which is more suitable for client + * responses. + */ + +export interface RedelegationEntryResponseSDKType { + redelegation_entry?: RedelegationEntrySDKType; + balance: string; +} +/** + * RedelegationResponse is equivalent to a Redelegation except that its entries + * contain a balance in addition to shares which is more suitable for client + * responses. + */ + +export interface RedelegationResponse { + redelegation?: Redelegation; + entries: RedelegationEntryResponse[]; +} +/** + * RedelegationResponse is equivalent to a Redelegation except that its entries + * contain a balance in addition to shares which is more suitable for client + * responses. + */ + +export interface RedelegationResponseSDKType { + redelegation?: RedelegationSDKType; + entries: RedelegationEntryResponseSDKType[]; +} +/** + * Pool is used for tracking bonded and not-bonded token supply of the bond + * denomination. + */ + +export interface Pool { + notBondedTokens: string; + bondedTokens: string; +} +/** + * Pool is used for tracking bonded and not-bonded token supply of the bond + * denomination. + */ + +export interface PoolSDKType { + not_bonded_tokens: string; + bonded_tokens: string; +} + +function createBaseHistoricalInfo(): HistoricalInfo { + return { + header: undefined, + valset: [] + }; +} + +export const HistoricalInfo = { + encode(message: HistoricalInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.valset) { + Validator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HistoricalInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHistoricalInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + + case 2: + message.valset.push(Validator.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): HistoricalInfo { + const message = createBaseHistoricalInfo(); + message.header = object.header !== undefined && object.header !== null ? 
Header.fromPartial(object.header) : undefined; + message.valset = object.valset?.map(e => Validator.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCommissionRates(): CommissionRates { + return { + rate: "", + maxRate: "", + maxChangeRate: "" + }; +} + +export const CommissionRates = { + encode(message: CommissionRates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rate !== "") { + writer.uint32(10).string(message.rate); + } + + if (message.maxRate !== "") { + writer.uint32(18).string(message.maxRate); + } + + if (message.maxChangeRate !== "") { + writer.uint32(26).string(message.maxChangeRate); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommissionRates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommissionRates(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rate = reader.string(); + break; + + case 2: + message.maxRate = reader.string(); + break; + + case 3: + message.maxChangeRate = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommissionRates { + const message = createBaseCommissionRates(); + message.rate = object.rate ?? ""; + message.maxRate = object.maxRate ?? ""; + message.maxChangeRate = object.maxChangeRate ?? ""; + return message; + } + +}; + +function createBaseCommission(): Commission { + return { + commissionRates: undefined, + updateTime: undefined + }; +} + +export const Commission = { + encode(message: Commission, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.commissionRates !== undefined) { + CommissionRates.encode(message.commissionRates, writer.uint32(10).fork()).ldelim(); + } + + if (message.updateTime !== undefined) { + Timestamp.encode(toTimestamp(message.updateTime), writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Commission { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommission(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.commissionRates = CommissionRates.decode(reader, reader.uint32()); + break; + + case 2: + message.updateTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Commission { + const message = createBaseCommission(); + message.commissionRates = object.commissionRates !== undefined && object.commissionRates !== null ? CommissionRates.fromPartial(object.commissionRates) : undefined; + message.updateTime = object.updateTime ?? 
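// --- Editorial usage sketch; not part of the generated diff above/below. ---
// The Commission codec converts the JS Date in `updateTime` to a protobuf
// Timestamp with `toTimestamp` on encode and back with `fromTimestamp` on
// decode, so callers only ever handle `Date`. Import path is assumed.
import { Commission } from "./staking";

const commission = Commission.fromPartial({
  commissionRates: { rate: "0.1", maxRate: "0.2", maxChangeRate: "0.01" }, // decimal strings
  updateTime: new Date("2023-01-01T00:00:00Z"),
});
const roundTripped = Commission.decode(Commission.encode(commission).finish());
console.log(roundTripped.updateTime instanceof Date); // true
// --- end sketch ---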
undefined; + return message; + } + +}; + +function createBaseDescription(): Description { + return { + moniker: "", + identity: "", + website: "", + securityContact: "", + details: "" + }; +} + +export const Description = { + encode(message: Description, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moniker !== "") { + writer.uint32(10).string(message.moniker); + } + + if (message.identity !== "") { + writer.uint32(18).string(message.identity); + } + + if (message.website !== "") { + writer.uint32(26).string(message.website); + } + + if (message.securityContact !== "") { + writer.uint32(34).string(message.securityContact); + } + + if (message.details !== "") { + writer.uint32(42).string(message.details); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Description { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescription(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.moniker = reader.string(); + break; + + case 2: + message.identity = reader.string(); + break; + + case 3: + message.website = reader.string(); + break; + + case 4: + message.securityContact = reader.string(); + break; + + case 5: + message.details = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Description { + const message = createBaseDescription(); + message.moniker = object.moniker ?? ""; + message.identity = object.identity ?? ""; + message.website = object.website ?? ""; + message.securityContact = object.securityContact ?? ""; + message.details = object.details ?? 
""; + return message; + } + +}; + +function createBaseValidator(): Validator { + return { + operatorAddress: "", + consensusPubkey: undefined, + jailed: false, + status: 0, + tokens: "", + delegatorShares: "", + description: undefined, + unbondingHeight: Long.ZERO, + unbondingTime: undefined, + commission: undefined, + minSelfDelegation: "" + }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.operatorAddress !== "") { + writer.uint32(10).string(message.operatorAddress); + } + + if (message.consensusPubkey !== undefined) { + Any.encode(message.consensusPubkey, writer.uint32(18).fork()).ldelim(); + } + + if (message.jailed === true) { + writer.uint32(24).bool(message.jailed); + } + + if (message.status !== 0) { + writer.uint32(32).int32(message.status); + } + + if (message.tokens !== "") { + writer.uint32(42).string(message.tokens); + } + + if (message.delegatorShares !== "") { + writer.uint32(50).string(message.delegatorShares); + } + + if (message.description !== undefined) { + Description.encode(message.description, writer.uint32(58).fork()).ldelim(); + } + + if (!message.unbondingHeight.isZero()) { + writer.uint32(64).int64(message.unbondingHeight); + } + + if (message.unbondingTime !== undefined) { + Timestamp.encode(toTimestamp(message.unbondingTime), writer.uint32(74).fork()).ldelim(); + } + + if (message.commission !== undefined) { + Commission.encode(message.commission, writer.uint32(82).fork()).ldelim(); + } + + if (message.minSelfDelegation !== "") { + writer.uint32(90).string(message.minSelfDelegation); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.operatorAddress = reader.string(); + break; + + case 2: + message.consensusPubkey = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.jailed = reader.bool(); + break; + + case 4: + message.status = (reader.int32() as any); + break; + + case 5: + message.tokens = reader.string(); + break; + + case 6: + message.delegatorShares = reader.string(); + break; + + case 7: + message.description = Description.decode(reader, reader.uint32()); + break; + + case 8: + message.unbondingHeight = (reader.int64() as Long); + break; + + case 9: + message.unbondingTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 10: + message.commission = Commission.decode(reader, reader.uint32()); + break; + + case 11: + message.minSelfDelegation = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Validator { + const message = createBaseValidator(); + message.operatorAddress = object.operatorAddress ?? ""; + message.consensusPubkey = object.consensusPubkey !== undefined && object.consensusPubkey !== null ? Any.fromPartial(object.consensusPubkey) : undefined; + message.jailed = object.jailed ?? false; + message.status = object.status ?? 0; + message.tokens = object.tokens ?? ""; + message.delegatorShares = object.delegatorShares ?? ""; + message.description = object.description !== undefined && object.description !== null ? 
Description.fromPartial(object.description) : undefined; + message.unbondingHeight = object.unbondingHeight !== undefined && object.unbondingHeight !== null ? Long.fromValue(object.unbondingHeight) : Long.ZERO; + message.unbondingTime = object.unbondingTime ?? undefined; + message.commission = object.commission !== undefined && object.commission !== null ? Commission.fromPartial(object.commission) : undefined; + message.minSelfDelegation = object.minSelfDelegation ?? ""; + return message; + } + +}; + +function createBaseValAddresses(): ValAddresses { + return { + addresses: [] + }; +} + +export const ValAddresses = { + encode(message: ValAddresses, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.addresses) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValAddresses { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValAddresses(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.addresses.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValAddresses { + const message = createBaseValAddresses(); + message.addresses = object.addresses?.map(e => e) || []; + return message; + } + +}; + +function createBaseDVPair(): DVPair { + return { + delegatorAddress: "", + validatorAddress: "" + }; +} + +export const DVPair = { + encode(message: DVPair, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DVPair { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDVPair(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DVPair { + const message = createBaseDVPair(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + return message; + } + +}; + +function createBaseDVPairs(): DVPairs { + return { + pairs: [] + }; +} + +export const DVPairs = { + encode(message: DVPairs, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pairs) { + DVPair.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DVPairs { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDVPairs(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pairs.push(DVPair.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DVPairs { + const message = createBaseDVPairs(); + message.pairs = object.pairs?.map(e => DVPair.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseDVVTriplet(): DVVTriplet { + return { + delegatorAddress: "", + validatorSrcAddress: "", + validatorDstAddress: "" + }; +} + +export const DVVTriplet = { + encode(message: DVVTriplet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorSrcAddress !== "") { + writer.uint32(18).string(message.validatorSrcAddress); + } + + if (message.validatorDstAddress !== "") { + writer.uint32(26).string(message.validatorDstAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DVVTriplet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDVVTriplet(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorSrcAddress = reader.string(); + break; + + case 3: + message.validatorDstAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DVVTriplet { + const message = createBaseDVVTriplet(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorSrcAddress = object.validatorSrcAddress ?? ""; + message.validatorDstAddress = object.validatorDstAddress ?? ""; + return message; + } + +}; + +function createBaseDVVTriplets(): DVVTriplets { + return { + triplets: [] + }; +} + +export const DVVTriplets = { + encode(message: DVVTriplets, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.triplets) { + DVVTriplet.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DVVTriplets { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDVVTriplets(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.triplets.push(DVVTriplet.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DVVTriplets { + const message = createBaseDVVTriplets(); + message.triplets = object.triplets?.map(e => DVVTriplet.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseDelegation(): Delegation { + return { + delegatorAddress: "", + validatorAddress: "", + shares: "" + }; +} + +export const Delegation = { + encode(message: Delegation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + if (message.shares !== "") { + writer.uint32(26).string(message.shares); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Delegation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelegation(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.shares = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Delegation { + const message = createBaseDelegation(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + message.shares = object.shares ?? ""; + return message; + } + +}; + +function createBaseUnbondingDelegation(): UnbondingDelegation { + return { + delegatorAddress: "", + validatorAddress: "", + entries: [] + }; +} + +export const UnbondingDelegation = { + encode(message: UnbondingDelegation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + for (const v of message.entries) { + UnbondingDelegationEntry.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UnbondingDelegation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUnbondingDelegation(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.entries.push(UnbondingDelegationEntry.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UnbondingDelegation { + const message = createBaseUnbondingDelegation(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? 
""; + message.entries = object.entries?.map(e => UnbondingDelegationEntry.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseUnbondingDelegationEntry(): UnbondingDelegationEntry { + return { + creationHeight: Long.ZERO, + completionTime: undefined, + initialBalance: "", + balance: "" + }; +} + +export const UnbondingDelegationEntry = { + encode(message: UnbondingDelegationEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.creationHeight.isZero()) { + writer.uint32(8).int64(message.creationHeight); + } + + if (message.completionTime !== undefined) { + Timestamp.encode(toTimestamp(message.completionTime), writer.uint32(18).fork()).ldelim(); + } + + if (message.initialBalance !== "") { + writer.uint32(26).string(message.initialBalance); + } + + if (message.balance !== "") { + writer.uint32(34).string(message.balance); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UnbondingDelegationEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUnbondingDelegationEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.creationHeight = (reader.int64() as Long); + break; + + case 2: + message.completionTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 3: + message.initialBalance = reader.string(); + break; + + case 4: + message.balance = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UnbondingDelegationEntry { + const message = createBaseUnbondingDelegationEntry(); + message.creationHeight = object.creationHeight !== undefined && object.creationHeight !== null ? Long.fromValue(object.creationHeight) : Long.ZERO; + message.completionTime = object.completionTime ?? undefined; + message.initialBalance = object.initialBalance ?? ""; + message.balance = object.balance ?? ""; + return message; + } + +}; + +function createBaseRedelegationEntry(): RedelegationEntry { + return { + creationHeight: Long.ZERO, + completionTime: undefined, + initialBalance: "", + sharesDst: "" + }; +} + +export const RedelegationEntry = { + encode(message: RedelegationEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.creationHeight.isZero()) { + writer.uint32(8).int64(message.creationHeight); + } + + if (message.completionTime !== undefined) { + Timestamp.encode(toTimestamp(message.completionTime), writer.uint32(18).fork()).ldelim(); + } + + if (message.initialBalance !== "") { + writer.uint32(26).string(message.initialBalance); + } + + if (message.sharesDst !== "") { + writer.uint32(34).string(message.sharesDst); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRedelegationEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.creationHeight = (reader.int64() as Long); + break; + + case 2: + message.completionTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 3: + message.initialBalance = reader.string(); + break; + + case 4: + message.sharesDst = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RedelegationEntry { + const message = createBaseRedelegationEntry(); + message.creationHeight = object.creationHeight !== undefined && object.creationHeight !== null ? Long.fromValue(object.creationHeight) : Long.ZERO; + message.completionTime = object.completionTime ?? undefined; + message.initialBalance = object.initialBalance ?? ""; + message.sharesDst = object.sharesDst ?? ""; + return message; + } + +}; + +function createBaseRedelegation(): Redelegation { + return { + delegatorAddress: "", + validatorSrcAddress: "", + validatorDstAddress: "", + entries: [] + }; +} + +export const Redelegation = { + encode(message: Redelegation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorSrcAddress !== "") { + writer.uint32(18).string(message.validatorSrcAddress); + } + + if (message.validatorDstAddress !== "") { + writer.uint32(26).string(message.validatorDstAddress); + } + + for (const v of message.entries) { + RedelegationEntry.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Redelegation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRedelegation(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorSrcAddress = reader.string(); + break; + + case 3: + message.validatorDstAddress = reader.string(); + break; + + case 4: + message.entries.push(RedelegationEntry.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Redelegation { + const message = createBaseRedelegation(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorSrcAddress = object.validatorSrcAddress ?? ""; + message.validatorDstAddress = object.validatorDstAddress ?? 
""; + message.entries = object.entries?.map(e => RedelegationEntry.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseParams(): Params { + return { + unbondingTime: undefined, + maxValidators: 0, + maxEntries: 0, + historicalEntries: 0, + bondDenom: "", + minCommissionRate: "" + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.unbondingTime !== undefined) { + Duration.encode(message.unbondingTime, writer.uint32(10).fork()).ldelim(); + } + + if (message.maxValidators !== 0) { + writer.uint32(16).uint32(message.maxValidators); + } + + if (message.maxEntries !== 0) { + writer.uint32(24).uint32(message.maxEntries); + } + + if (message.historicalEntries !== 0) { + writer.uint32(32).uint32(message.historicalEntries); + } + + if (message.bondDenom !== "") { + writer.uint32(42).string(message.bondDenom); + } + + if (message.minCommissionRate !== "") { + writer.uint32(50).string(message.minCommissionRate); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.unbondingTime = Duration.decode(reader, reader.uint32()); + break; + + case 2: + message.maxValidators = reader.uint32(); + break; + + case 3: + message.maxEntries = reader.uint32(); + break; + + case 4: + message.historicalEntries = reader.uint32(); + break; + + case 5: + message.bondDenom = reader.string(); + break; + + case 6: + message.minCommissionRate = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.unbondingTime = object.unbondingTime !== undefined && object.unbondingTime !== null ? Duration.fromPartial(object.unbondingTime) : undefined; + message.maxValidators = object.maxValidators ?? 0; + message.maxEntries = object.maxEntries ?? 0; + message.historicalEntries = object.historicalEntries ?? 0; + message.bondDenom = object.bondDenom ?? ""; + message.minCommissionRate = object.minCommissionRate ?? ""; + return message; + } + +}; + +function createBaseDelegationResponse(): DelegationResponse { + return { + delegation: undefined, + balance: undefined + }; +} + +export const DelegationResponse = { + encode(message: DelegationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegation !== undefined) { + Delegation.encode(message.delegation, writer.uint32(10).fork()).ldelim(); + } + + if (message.balance !== undefined) { + Coin.encode(message.balance, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelegationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDelegationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegation = Delegation.decode(reader, reader.uint32()); + break; + + case 2: + message.balance = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelegationResponse { + const message = createBaseDelegationResponse(); + message.delegation = object.delegation !== undefined && object.delegation !== null ? Delegation.fromPartial(object.delegation) : undefined; + message.balance = object.balance !== undefined && object.balance !== null ? Coin.fromPartial(object.balance) : undefined; + return message; + } + +}; + +function createBaseRedelegationEntryResponse(): RedelegationEntryResponse { + return { + redelegationEntry: undefined, + balance: "" + }; +} + +export const RedelegationEntryResponse = { + encode(message: RedelegationEntryResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.redelegationEntry !== undefined) { + RedelegationEntry.encode(message.redelegationEntry, writer.uint32(10).fork()).ldelim(); + } + + if (message.balance !== "") { + writer.uint32(34).string(message.balance); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationEntryResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRedelegationEntryResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.redelegationEntry = RedelegationEntry.decode(reader, reader.uint32()); + break; + + case 4: + message.balance = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RedelegationEntryResponse { + const message = createBaseRedelegationEntryResponse(); + message.redelegationEntry = object.redelegationEntry !== undefined && object.redelegationEntry !== null ? RedelegationEntry.fromPartial(object.redelegationEntry) : undefined; + message.balance = object.balance ?? ""; + return message; + } + +}; + +function createBaseRedelegationResponse(): RedelegationResponse { + return { + redelegation: undefined, + entries: [] + }; +} + +export const RedelegationResponse = { + encode(message: RedelegationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.redelegation !== undefined) { + Redelegation.encode(message.redelegation, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.entries) { + RedelegationEntryResponse.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RedelegationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
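// --- Editorial usage sketch; not part of the generated diff above/below. ---
// DelegationResponse nests a Coin `balance`; nested messages are written as
// length-delimited sub-encodings (the `fork()` / `ldelim()` calls above).
// Import path is assumed.
import { DelegationResponse } from "./staking";

const delRes = DelegationResponse.fromPartial({
  delegation: {
    delegatorAddress: "cosmos1exampledelegator",        // placeholder
    validatorAddress: "cosmosvaloper1examplevalidator", // placeholder
    shares: "100",
  },
  balance: { denom: "uatom", amount: "100" },
});
const decodedDelRes = DelegationResponse.decode(DelegationResponse.encode(delRes).finish());
console.log(decodedDelRes.balance?.amount); // "100"
// --- end sketch ---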
reader.len : reader.pos + length; + const message = createBaseRedelegationResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.redelegation = Redelegation.decode(reader, reader.uint32()); + break; + + case 2: + message.entries.push(RedelegationEntryResponse.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RedelegationResponse { + const message = createBaseRedelegationResponse(); + message.redelegation = object.redelegation !== undefined && object.redelegation !== null ? Redelegation.fromPartial(object.redelegation) : undefined; + message.entries = object.entries?.map(e => RedelegationEntryResponse.fromPartial(e)) || []; + return message; + } + +}; + +function createBasePool(): Pool { + return { + notBondedTokens: "", + bondedTokens: "" + }; +} + +export const Pool = { + encode(message: Pool, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.notBondedTokens !== "") { + writer.uint32(10).string(message.notBondedTokens); + } + + if (message.bondedTokens !== "") { + writer.uint32(18).string(message.bondedTokens); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Pool { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePool(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.notBondedTokens = reader.string(); + break; + + case 2: + message.bondedTokens = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Pool { + const message = createBasePool(); + message.notBondedTokens = object.notBondedTokens ?? ""; + message.bondedTokens = object.bondedTokens ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/staking/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..b530ca14 --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,73 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgCreateValidator, MsgCreateValidatorResponse, MsgEditValidator, MsgEditValidatorResponse, MsgDelegate, MsgDelegateResponse, MsgBeginRedelegate, MsgBeginRedelegateResponse, MsgUndelegate, MsgUndelegateResponse } from "./tx"; +/** Msg defines the staking Msg service. */ + +export interface Msg { + /** CreateValidator defines a method for creating a new validator. */ + createValidator(request: MsgCreateValidator): Promise; + /** EditValidator defines a method for editing an existing validator. */ + + editValidator(request: MsgEditValidator): Promise; + /** + * Delegate defines a method for performing a delegation of coins + * from a delegator to a validator. + */ + + delegate(request: MsgDelegate): Promise; + /** + * BeginRedelegate defines a method for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ + + beginRedelegate(request: MsgBeginRedelegate): Promise; + /** + * Undelegate defines a method for performing an undelegation from a + * delegate and a validator. 
+ */ + + undelegate(request: MsgUndelegate): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.createValidator = this.createValidator.bind(this); + this.editValidator = this.editValidator.bind(this); + this.delegate = this.delegate.bind(this); + this.beginRedelegate = this.beginRedelegate.bind(this); + this.undelegate = this.undelegate.bind(this); + } + + createValidator(request: MsgCreateValidator): Promise { + const data = MsgCreateValidator.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Msg", "CreateValidator", data); + return promise.then(data => MsgCreateValidatorResponse.decode(new _m0.Reader(data))); + } + + editValidator(request: MsgEditValidator): Promise { + const data = MsgEditValidator.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Msg", "EditValidator", data); + return promise.then(data => MsgEditValidatorResponse.decode(new _m0.Reader(data))); + } + + delegate(request: MsgDelegate): Promise { + const data = MsgDelegate.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Msg", "Delegate", data); + return promise.then(data => MsgDelegateResponse.decode(new _m0.Reader(data))); + } + + beginRedelegate(request: MsgBeginRedelegate): Promise { + const data = MsgBeginRedelegate.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Msg", "BeginRedelegate", data); + return promise.then(data => MsgBeginRedelegateResponse.decode(new _m0.Reader(data))); + } + + undelegate(request: MsgUndelegate): Promise { + const data = MsgUndelegate.encode(request).finish(); + const promise = this.rpc.request("cosmos.staking.v1beta1.Msg", "Undelegate", data); + return promise.then(data => MsgUndelegateResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/staking/v1beta1/tx.ts b/packages/codegen/src/cosmos/staking/v1beta1/tx.ts new file mode 100644 index 00000000..df65c4af --- /dev/null +++ b/packages/codegen/src/cosmos/staking/v1beta1/tx.ts @@ -0,0 +1,728 @@ +import { Description, DescriptionSDKType, CommissionRates, CommissionRatesSDKType } from "./staking"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Timestamp } from "../../../google/protobuf/timestamp"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, toTimestamp, fromTimestamp } from "../../../helpers"; +/** MsgCreateValidator defines a SDK message for creating a new validator. */ + +export interface MsgCreateValidator { + description?: Description; + commission?: CommissionRates; + minSelfDelegation: string; + delegatorAddress: string; + validatorAddress: string; + pubkey?: Any; + value?: Coin; +} +/** MsgCreateValidator defines a SDK message for creating a new validator. */ + +export interface MsgCreateValidatorSDKType { + description?: DescriptionSDKType; + commission?: CommissionRatesSDKType; + min_self_delegation: string; + delegator_address: string; + validator_address: string; + pubkey?: AnySDKType; + value?: CoinSDKType; +} +/** MsgCreateValidatorResponse defines the Msg/CreateValidator response type. */ + +export interface MsgCreateValidatorResponse {} +/** MsgCreateValidatorResponse defines the Msg/CreateValidator response type. 
*/ + +export interface MsgCreateValidatorResponseSDKType {} +/** MsgEditValidator defines a SDK message for editing an existing validator. */ + +export interface MsgEditValidator { + description?: Description; + validatorAddress: string; + /** + * We pass a reference to the new commission rate and min self delegation as + * it's not mandatory to update. If not updated, the deserialized rate will be + * zero with no way to distinguish if an update was intended. + * REF: #2373 + */ + + commissionRate: string; + minSelfDelegation: string; +} +/** MsgEditValidator defines a SDK message for editing an existing validator. */ + +export interface MsgEditValidatorSDKType { + description?: DescriptionSDKType; + validator_address: string; + commission_rate: string; + min_self_delegation: string; +} +/** MsgEditValidatorResponse defines the Msg/EditValidator response type. */ + +export interface MsgEditValidatorResponse {} +/** MsgEditValidatorResponse defines the Msg/EditValidator response type. */ + +export interface MsgEditValidatorResponseSDKType {} +/** + * MsgDelegate defines a SDK message for performing a delegation of coins + * from a delegator to a validator. + */ + +export interface MsgDelegate { + delegatorAddress: string; + validatorAddress: string; + amount?: Coin; +} +/** + * MsgDelegate defines a SDK message for performing a delegation of coins + * from a delegator to a validator. + */ + +export interface MsgDelegateSDKType { + delegator_address: string; + validator_address: string; + amount?: CoinSDKType; +} +/** MsgDelegateResponse defines the Msg/Delegate response type. */ + +export interface MsgDelegateResponse {} +/** MsgDelegateResponse defines the Msg/Delegate response type. */ + +export interface MsgDelegateResponseSDKType {} +/** + * MsgBeginRedelegate defines a SDK message for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ + +export interface MsgBeginRedelegate { + delegatorAddress: string; + validatorSrcAddress: string; + validatorDstAddress: string; + amount?: Coin; +} +/** + * MsgBeginRedelegate defines a SDK message for performing a redelegation + * of coins from a delegator and source validator to a destination validator. + */ + +export interface MsgBeginRedelegateSDKType { + delegator_address: string; + validator_src_address: string; + validator_dst_address: string; + amount?: CoinSDKType; +} +/** MsgBeginRedelegateResponse defines the Msg/BeginRedelegate response type. */ + +export interface MsgBeginRedelegateResponse { + completionTime?: Date; +} +/** MsgBeginRedelegateResponse defines the Msg/BeginRedelegate response type. */ + +export interface MsgBeginRedelegateResponseSDKType { + completion_time?: Date; +} +/** + * MsgUndelegate defines a SDK message for performing an undelegation from a + * delegate and a validator. + */ + +export interface MsgUndelegate { + delegatorAddress: string; + validatorAddress: string; + amount?: Coin; +} +/** + * MsgUndelegate defines a SDK message for performing an undelegation from a + * delegate and a validator. + */ + +export interface MsgUndelegateSDKType { + delegator_address: string; + validator_address: string; + amount?: CoinSDKType; +} +/** MsgUndelegateResponse defines the Msg/Undelegate response type. */ + +export interface MsgUndelegateResponse { + completionTime?: Date; +} +/** MsgUndelegateResponse defines the Msg/Undelegate response type. 
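A brief sketch of how the response types that carry `completionTime` behave: their codecs (defined below) convert between a JavaScript `Date` and a protobuf `Timestamp` via the `toTimestamp`/`fromTimestamp` helpers, so a Date round-trips through the wire format to millisecond precision.

import { MsgUndelegateResponse } from "./tx";

const original = MsgUndelegateResponse.fromPartial({
  completionTime: new Date("2024-01-01T00:00:00Z")
});

const wire = MsgUndelegateResponse.encode(original).finish();
const decoded = MsgUndelegateResponse.decode(wire);
// decoded.completionTime is again a Date with the same timestamp.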
*/ + +export interface MsgUndelegateResponseSDKType { + completion_time?: Date; +} + +function createBaseMsgCreateValidator(): MsgCreateValidator { + return { + description: undefined, + commission: undefined, + minSelfDelegation: "", + delegatorAddress: "", + validatorAddress: "", + pubkey: undefined, + value: undefined + }; +} + +export const MsgCreateValidator = { + encode(message: MsgCreateValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.description !== undefined) { + Description.encode(message.description, writer.uint32(10).fork()).ldelim(); + } + + if (message.commission !== undefined) { + CommissionRates.encode(message.commission, writer.uint32(18).fork()).ldelim(); + } + + if (message.minSelfDelegation !== "") { + writer.uint32(26).string(message.minSelfDelegation); + } + + if (message.delegatorAddress !== "") { + writer.uint32(34).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(42).string(message.validatorAddress); + } + + if (message.pubkey !== undefined) { + Any.encode(message.pubkey, writer.uint32(50).fork()).ldelim(); + } + + if (message.value !== undefined) { + Coin.encode(message.value, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.description = Description.decode(reader, reader.uint32()); + break; + + case 2: + message.commission = CommissionRates.decode(reader, reader.uint32()); + break; + + case 3: + message.minSelfDelegation = reader.string(); + break; + + case 4: + message.delegatorAddress = reader.string(); + break; + + case 5: + message.validatorAddress = reader.string(); + break; + + case 6: + message.pubkey = Any.decode(reader, reader.uint32()); + break; + + case 7: + message.value = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateValidator { + const message = createBaseMsgCreateValidator(); + message.description = object.description !== undefined && object.description !== null ? Description.fromPartial(object.description) : undefined; + message.commission = object.commission !== undefined && object.commission !== null ? CommissionRates.fromPartial(object.commission) : undefined; + message.minSelfDelegation = object.minSelfDelegation ?? ""; + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + message.pubkey = object.pubkey !== undefined && object.pubkey !== null ? Any.fromPartial(object.pubkey) : undefined; + message.value = object.value !== undefined && object.value !== null ? Coin.fromPartial(object.value) : undefined; + return message; + } + +}; + +function createBaseMsgCreateValidatorResponse(): MsgCreateValidatorResponse { + return {}; +} + +export const MsgCreateValidatorResponse = { + encode(_: MsgCreateValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidatorResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateValidatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreateValidatorResponse { + const message = createBaseMsgCreateValidatorResponse(); + return message; + } + +}; + +function createBaseMsgEditValidator(): MsgEditValidator { + return { + description: undefined, + validatorAddress: "", + commissionRate: "", + minSelfDelegation: "" + }; +} + +export const MsgEditValidator = { + encode(message: MsgEditValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.description !== undefined) { + Description.encode(message.description, writer.uint32(10).fork()).ldelim(); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + if (message.commissionRate !== "") { + writer.uint32(26).string(message.commissionRate); + } + + if (message.minSelfDelegation !== "") { + writer.uint32(34).string(message.minSelfDelegation); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEditValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgEditValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.description = Description.decode(reader, reader.uint32()); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.commissionRate = reader.string(); + break; + + case 4: + message.minSelfDelegation = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgEditValidator { + const message = createBaseMsgEditValidator(); + message.description = object.description !== undefined && object.description !== null ? Description.fromPartial(object.description) : undefined; + message.validatorAddress = object.validatorAddress ?? ""; + message.commissionRate = object.commissionRate ?? ""; + message.minSelfDelegation = object.minSelfDelegation ?? ""; + return message; + } + +}; + +function createBaseMsgEditValidatorResponse(): MsgEditValidatorResponse { + return {}; +} + +export const MsgEditValidatorResponse = { + encode(_: MsgEditValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEditValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgEditValidatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgEditValidatorResponse { + const message = createBaseMsgEditValidatorResponse(); + return message; + } + +}; + +function createBaseMsgDelegate(): MsgDelegate { + return { + delegatorAddress: "", + validatorAddress: "", + amount: undefined + }; +} + +export const MsgDelegate = { + encode(message: MsgDelegate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + if (message.amount !== undefined) { + Coin.encode(message.amount, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDelegate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDelegate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.amount = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgDelegate { + const message = createBaseMsgDelegate(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + message.amount = object.amount !== undefined && object.amount !== null ? Coin.fromPartial(object.amount) : undefined; + return message; + } + +}; + +function createBaseMsgDelegateResponse(): MsgDelegateResponse { + return {}; +} + +export const MsgDelegateResponse = { + encode(_: MsgDelegateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDelegateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDelegateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgDelegateResponse { + const message = createBaseMsgDelegateResponse(); + return message; + } + +}; + +function createBaseMsgBeginRedelegate(): MsgBeginRedelegate { + return { + delegatorAddress: "", + validatorSrcAddress: "", + validatorDstAddress: "", + amount: undefined + }; +} + +export const MsgBeginRedelegate = { + encode(message: MsgBeginRedelegate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorSrcAddress !== "") { + writer.uint32(18).string(message.validatorSrcAddress); + } + + if (message.validatorDstAddress !== "") { + writer.uint32(26).string(message.validatorDstAddress); + } + + if (message.amount !== undefined) { + Coin.encode(message.amount, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgBeginRedelegate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgBeginRedelegate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorSrcAddress = reader.string(); + break; + + case 3: + message.validatorDstAddress = reader.string(); + break; + + case 4: + message.amount = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgBeginRedelegate { + const message = createBaseMsgBeginRedelegate(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorSrcAddress = object.validatorSrcAddress ?? ""; + message.validatorDstAddress = object.validatorDstAddress ?? ""; + message.amount = object.amount !== undefined && object.amount !== null ? Coin.fromPartial(object.amount) : undefined; + return message; + } + +}; + +function createBaseMsgBeginRedelegateResponse(): MsgBeginRedelegateResponse { + return { + completionTime: undefined + }; +} + +export const MsgBeginRedelegateResponse = { + encode(message: MsgBeginRedelegateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.completionTime !== undefined) { + Timestamp.encode(toTimestamp(message.completionTime), writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgBeginRedelegateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgBeginRedelegateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.completionTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgBeginRedelegateResponse { + const message = createBaseMsgBeginRedelegateResponse(); + message.completionTime = object.completionTime ?? 
undefined; + return message; + } + +}; + +function createBaseMsgUndelegate(): MsgUndelegate { + return { + delegatorAddress: "", + validatorAddress: "", + amount: undefined + }; +} + +export const MsgUndelegate = { + encode(message: MsgUndelegate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.delegatorAddress !== "") { + writer.uint32(10).string(message.delegatorAddress); + } + + if (message.validatorAddress !== "") { + writer.uint32(18).string(message.validatorAddress); + } + + if (message.amount !== undefined) { + Coin.encode(message.amount, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUndelegate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUndelegate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.delegatorAddress = reader.string(); + break; + + case 2: + message.validatorAddress = reader.string(); + break; + + case 3: + message.amount = Coin.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUndelegate { + const message = createBaseMsgUndelegate(); + message.delegatorAddress = object.delegatorAddress ?? ""; + message.validatorAddress = object.validatorAddress ?? ""; + message.amount = object.amount !== undefined && object.amount !== null ? Coin.fromPartial(object.amount) : undefined; + return message; + } + +}; + +function createBaseMsgUndelegateResponse(): MsgUndelegateResponse { + return { + completionTime: undefined + }; +} + +export const MsgUndelegateResponse = { + encode(message: MsgUndelegateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.completionTime !== undefined) { + Timestamp.encode(toTimestamp(message.completionTime), writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUndelegateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUndelegateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.completionTime = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUndelegateResponse { + const message = createBaseMsgUndelegateResponse(); + message.completionTime = object.completionTime ?? undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/tx/signing/v1beta1/signing.ts b/packages/codegen/src/cosmos/tx/signing/v1beta1/signing.ts new file mode 100644 index 00000000..a13bd8ec --- /dev/null +++ b/packages/codegen/src/cosmos/tx/signing/v1beta1/signing.ts @@ -0,0 +1,465 @@ +import { CompactBitArray, CompactBitArraySDKType } from "../../../crypto/multisig/v1beta1/multisig"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * SignMode represents a signing mode with its own security guarantees. 
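A quick sketch of the companion helpers declared just below: `signModeFromJSON` accepts either the numeric enum value or its string name, `signModeToJSON` maps back to the string, and unknown inputs collapse to UNRECOGNIZED.

import { SignMode, signModeFromJSON, signModeToJSON } from "./signing";

const a = signModeFromJSON("SIGN_MODE_DIRECT"); // SignMode.SIGN_MODE_DIRECT
const b = signModeFromJSON(1);                  // also SignMode.SIGN_MODE_DIRECT
const name = signModeToJSON(SignMode.SIGN_MODE_LEGACY_AMINO_JSON); // "SIGN_MODE_LEGACY_AMINO_JSON"
const unknown = signModeFromJSON("SOMETHING_ELSE"); // SignMode.UNRECOGNIZED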
+ * + * This enum should be considered a registry of all known sign modes + * in the Cosmos ecosystem. Apps are not expected to support all known + * sign modes. Apps that would like to support custom sign modes are + * encouraged to open a small PR against this file to add a new case + * to this SignMode enum describing their sign mode so that different + * apps have a consistent version of this enum. + */ + +export enum SignMode { + /** + * SIGN_MODE_UNSPECIFIED - SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be + * rejected. + */ + SIGN_MODE_UNSPECIFIED = 0, + + /** + * SIGN_MODE_DIRECT - SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is + * verified with raw bytes from Tx. + */ + SIGN_MODE_DIRECT = 1, + + /** + * SIGN_MODE_TEXTUAL - SIGN_MODE_TEXTUAL is a future signing mode that will verify some + * human-readable textual representation on top of the binary representation + * from SIGN_MODE_DIRECT. It is currently not supported. + */ + SIGN_MODE_TEXTUAL = 2, + + /** + * SIGN_MODE_DIRECT_AUX - SIGN_MODE_DIRECT_AUX specifies a signing mode which uses + * SignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not + * require signers signing over other signers' `signer_info`. It also allows + * for adding Tips in transactions. + * + * Since: cosmos-sdk 0.46 + */ + SIGN_MODE_DIRECT_AUX = 3, + + /** + * SIGN_MODE_LEGACY_AMINO_JSON - SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses + * Amino JSON and will be removed in the future. + */ + SIGN_MODE_LEGACY_AMINO_JSON = 127, + UNRECOGNIZED = -1, +} +export const SignModeSDKType = SignMode; +export function signModeFromJSON(object: any): SignMode { + switch (object) { + case 0: + case "SIGN_MODE_UNSPECIFIED": + return SignMode.SIGN_MODE_UNSPECIFIED; + + case 1: + case "SIGN_MODE_DIRECT": + return SignMode.SIGN_MODE_DIRECT; + + case 2: + case "SIGN_MODE_TEXTUAL": + return SignMode.SIGN_MODE_TEXTUAL; + + case 3: + case "SIGN_MODE_DIRECT_AUX": + return SignMode.SIGN_MODE_DIRECT_AUX; + + case 127: + case "SIGN_MODE_LEGACY_AMINO_JSON": + return SignMode.SIGN_MODE_LEGACY_AMINO_JSON; + + case -1: + case "UNRECOGNIZED": + default: + return SignMode.UNRECOGNIZED; + } +} +export function signModeToJSON(object: SignMode): string { + switch (object) { + case SignMode.SIGN_MODE_UNSPECIFIED: + return "SIGN_MODE_UNSPECIFIED"; + + case SignMode.SIGN_MODE_DIRECT: + return "SIGN_MODE_DIRECT"; + + case SignMode.SIGN_MODE_TEXTUAL: + return "SIGN_MODE_TEXTUAL"; + + case SignMode.SIGN_MODE_DIRECT_AUX: + return "SIGN_MODE_DIRECT_AUX"; + + case SignMode.SIGN_MODE_LEGACY_AMINO_JSON: + return "SIGN_MODE_LEGACY_AMINO_JSON"; + + case SignMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** SignatureDescriptors wraps multiple SignatureDescriptor's. */ + +export interface SignatureDescriptors { + /** signatures are the signature descriptors */ + signatures: SignatureDescriptor[]; +} +/** SignatureDescriptors wraps multiple SignatureDescriptor's. */ + +export interface SignatureDescriptorsSDKType { + signatures: SignatureDescriptorSDKType[]; +} +/** + * SignatureDescriptor is a convenience type which represents the full data for + * a signature including the public key of the signer, signing modes and the + * signature itself. It is primarily used for coordinating signatures between + * clients. 
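A sketch of building one of these descriptors with the generated fromPartial helper; note that `sequence` is a uint64 and therefore a `Long`, not a plain number. The values are placeholders.

import { Long } from "../../../../helpers";
import { SignatureDescriptor, SignMode } from "./signing";

const descriptor = SignatureDescriptor.fromPartial({
  data: {
    single: {
      mode: SignMode.SIGN_MODE_DIRECT,
      signature: new Uint8Array([/* raw signature bytes */])
    }
  },
  sequence: Long.fromNumber(7, true) // unsigned 64-bit account sequence
});

const bytes = SignatureDescriptor.encode(descriptor).finish();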
+ */ + +export interface SignatureDescriptor { + /** public_key is the public key of the signer */ + publicKey?: Any; + data?: SignatureDescriptor_Data; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to prevent + * replay attacks. + */ + + sequence: Long; +} +/** + * SignatureDescriptor is a convenience type which represents the full data for + * a signature including the public key of the signer, signing modes and the + * signature itself. It is primarily used for coordinating signatures between + * clients. + */ + +export interface SignatureDescriptorSDKType { + public_key?: AnySDKType; + data?: SignatureDescriptor_DataSDKType; + sequence: Long; +} +/** Data represents signature data */ + +export interface SignatureDescriptor_Data { + /** single represents a single signer */ + single?: SignatureDescriptor_Data_Single; + /** multi represents a multisig signer */ + + multi?: SignatureDescriptor_Data_Multi; +} +/** Data represents signature data */ + +export interface SignatureDescriptor_DataSDKType { + single?: SignatureDescriptor_Data_SingleSDKType; + multi?: SignatureDescriptor_Data_MultiSDKType; +} +/** Single is the signature data for a single signer */ + +export interface SignatureDescriptor_Data_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; + /** signature is the raw signature bytes */ + + signature: Uint8Array; +} +/** Single is the signature data for a single signer */ + +export interface SignatureDescriptor_Data_SingleSDKType { + mode: SignMode; + signature: Uint8Array; +} +/** Multi is the signature data for a multisig public key */ + +export interface SignatureDescriptor_Data_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray?: CompactBitArray; + /** signatures is the signatures of the multi-signature */ + + signatures: SignatureDescriptor_Data[]; +} +/** Multi is the signature data for a multisig public key */ + +export interface SignatureDescriptor_Data_MultiSDKType { + bitarray?: CompactBitArraySDKType; + signatures: SignatureDescriptor_DataSDKType[]; +} + +function createBaseSignatureDescriptors(): SignatureDescriptors { + return { + signatures: [] + }; +} + +export const SignatureDescriptors = { + encode(message: SignatureDescriptors, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signatures) { + SignatureDescriptor.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptors { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignatureDescriptors(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signatures.push(SignatureDescriptor.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureDescriptors { + const message = createBaseSignatureDescriptors(); + message.signatures = object.signatures?.map(e => SignatureDescriptor.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSignatureDescriptor(): SignatureDescriptor { + return { + publicKey: undefined, + data: undefined, + sequence: Long.UZERO + }; +} + +export const SignatureDescriptor = { + encode(message: SignatureDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.data !== undefined) { + SignatureDescriptor_Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.data = SignatureDescriptor_Data.decode(reader, reader.uint32()); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureDescriptor { + const message = createBaseSignatureDescriptor(); + message.publicKey = object.publicKey !== undefined && object.publicKey !== null ? Any.fromPartial(object.publicKey) : undefined; + message.data = object.data !== undefined && object.data !== null ? SignatureDescriptor_Data.fromPartial(object.data) : undefined; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseSignatureDescriptor_Data(): SignatureDescriptor_Data { + return { + single: undefined, + multi: undefined + }; +} + +export const SignatureDescriptor_Data = { + encode(message: SignatureDescriptor_Data, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.single !== undefined) { + SignatureDescriptor_Data_Single.encode(message.single, writer.uint32(10).fork()).ldelim(); + } + + if (message.multi !== undefined) { + SignatureDescriptor_Data_Multi.encode(message.multi, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.single = SignatureDescriptor_Data_Single.decode(reader, reader.uint32()); + break; + + case 2: + message.multi = SignatureDescriptor_Data_Multi.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureDescriptor_Data { + const message = createBaseSignatureDescriptor_Data(); + message.single = object.single !== undefined && object.single !== null ? SignatureDescriptor_Data_Single.fromPartial(object.single) : undefined; + message.multi = object.multi !== undefined && object.multi !== null ? SignatureDescriptor_Data_Multi.fromPartial(object.multi) : undefined; + return message; + } + +}; + +function createBaseSignatureDescriptor_Data_Single(): SignatureDescriptor_Data_Single { + return { + mode: 0, + signature: new Uint8Array() + }; +} + +export const SignatureDescriptor_Data_Single = { + encode(message: SignatureDescriptor_Data_Single, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mode !== 0) { + writer.uint32(8).int32(message.mode); + } + + if (message.signature.length !== 0) { + writer.uint32(18).bytes(message.signature); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Single { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data_Single(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.mode = (reader.int32() as any); + break; + + case 2: + message.signature = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureDescriptor_Data_Single { + const message = createBaseSignatureDescriptor_Data_Single(); + message.mode = object.mode ?? 0; + message.signature = object.signature ?? new Uint8Array(); + return message; + } + +}; + +function createBaseSignatureDescriptor_Data_Multi(): SignatureDescriptor_Data_Multi { + return { + bitarray: undefined, + signatures: [] + }; +} + +export const SignatureDescriptor_Data_Multi = { + encode(message: SignatureDescriptor_Data_Multi, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bitarray !== undefined) { + CompactBitArray.encode(message.bitarray, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.signatures) { + SignatureDescriptor_Data.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Multi { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data_Multi(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bitarray = CompactBitArray.decode(reader, reader.uint32()); + break; + + case 2: + message.signatures.push(SignatureDescriptor_Data.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureDescriptor_Data_Multi { + const message = createBaseSignatureDescriptor_Data_Multi(); + message.bitarray = object.bitarray !== undefined && object.bitarray !== null ? CompactBitArray.fromPartial(object.bitarray) : undefined; + message.signatures = object.signatures?.map(e => SignatureDescriptor_Data.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/tx/v1beta1/service.lcd.ts b/packages/codegen/src/cosmos/tx/v1beta1/service.lcd.ts new file mode 100644 index 00000000..ee31b21e --- /dev/null +++ b/packages/codegen/src/cosmos/tx/v1beta1/service.lcd.ts @@ -0,0 +1,65 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { GetTxRequest, GetTxResponseSDKType, GetTxsEventRequest, GetTxsEventResponseSDKType, GetBlockWithTxsRequest, GetBlockWithTxsResponseSDKType } from "./service"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.getTx = this.getTx.bind(this); + this.getTxsEvent = this.getTxsEvent.bind(this); + this.getBlockWithTxs = this.getBlockWithTxs.bind(this); + } + /* GetTx fetches a tx by hash. */ + + + async getTx(params: GetTxRequest): Promise { + const endpoint = `cosmos/tx/v1beta1/txs/${params.hash}`; + return await this.req.get(endpoint); + } + /* GetTxsEvent fetches txs by event. */ + + + async getTxsEvent(params: GetTxsEventRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.events !== "undefined") { + options.params.events = params.events; + } + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + if (typeof params?.orderBy !== "undefined") { + options.params.order_by = params.orderBy; + } + + const endpoint = `cosmos/tx/v1beta1/txs`; + return await this.req.get(endpoint, options); + } + /* GetBlockWithTxs fetches a block with decoded txs. 
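A usage sketch for this LCD query client. The exact constructor shape of `LCDClient` from @osmonauts/lcd is an assumption here (a REST endpoint option is presumed); the endpoint URL, hash, and height are placeholders.

import { LCDClient } from "@osmonauts/lcd";
import { Long } from "../../../helpers";
import { LCDQueryClient } from "./service.lcd";

async function lcdSketch() {
  // Assumed constructor option; check @osmonauts/lcd for the actual signature.
  const requestClient = new LCDClient({ restEndpoint: "https://rest.example.org" });
  const queries = new LCDQueryClient({ requestClient });

  const tx = await queries.getTx({ hash: "ABC123..." });              // placeholder tx hash
  const block = await queries.getBlockWithTxs({ height: Long.fromNumber(100) });
  return { tx, block };
}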
+ + Since: cosmos-sdk 0.45.2 */ + + + async getBlockWithTxs(params: GetBlockWithTxsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmos/tx/v1beta1/txs/block/${params.height}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/tx/v1beta1/service.rpc.Service.ts b/packages/codegen/src/cosmos/tx/v1beta1/service.rpc.Service.ts new file mode 100644 index 00000000..385b42d5 --- /dev/null +++ b/packages/codegen/src/cosmos/tx/v1beta1/service.rpc.Service.ts @@ -0,0 +1,95 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { SimulateRequest, SimulateResponse, GetTxRequest, GetTxResponse, BroadcastTxRequest, BroadcastTxResponse, GetTxsEventRequest, GetTxsEventResponse, GetBlockWithTxsRequest, GetBlockWithTxsResponse } from "./service"; +/** Service defines a gRPC service for interacting with transactions. */ + +export interface Service { + /** Simulate simulates executing a transaction for estimating gas usage. */ + simulate(request: SimulateRequest): Promise; + /** GetTx fetches a tx by hash. */ + + getTx(request: GetTxRequest): Promise; + /** BroadcastTx broadcast transaction. */ + + broadcastTx(request: BroadcastTxRequest): Promise; + /** GetTxsEvent fetches txs by event. */ + + getTxsEvent(request: GetTxsEventRequest): Promise; + /** + * GetBlockWithTxs fetches a block with decoded txs. + * + * Since: cosmos-sdk 0.45.2 + */ + + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise; +} +export class ServiceClientImpl implements Service { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.simulate = this.simulate.bind(this); + this.getTx = this.getTx.bind(this); + this.broadcastTx = this.broadcastTx.bind(this); + this.getTxsEvent = this.getTxsEvent.bind(this); + this.getBlockWithTxs = this.getBlockWithTxs.bind(this); + } + + simulate(request: SimulateRequest): Promise { + const data = SimulateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "Simulate", data); + return promise.then(data => SimulateResponse.decode(new _m0.Reader(data))); + } + + getTx(request: GetTxRequest): Promise { + const data = GetTxRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetTx", data); + return promise.then(data => GetTxResponse.decode(new _m0.Reader(data))); + } + + broadcastTx(request: BroadcastTxRequest): Promise { + const data = BroadcastTxRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "BroadcastTx", data); + return promise.then(data => BroadcastTxResponse.decode(new _m0.Reader(data))); + } + + getTxsEvent(request: GetTxsEventRequest): Promise { + const data = GetTxsEventRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetTxsEvent", data); + return promise.then(data => GetTxsEventResponse.decode(new _m0.Reader(data))); + } + + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise { + const data = GetBlockWithTxsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetBlockWithTxs", data); + return promise.then(data => GetBlockWithTxsResponse.decode(new _m0.Reader(data))); + } + +} +export const 
createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new ServiceClientImpl(rpc); + return { + simulate(request: SimulateRequest): Promise { + return queryService.simulate(request); + }, + + getTx(request: GetTxRequest): Promise { + return queryService.getTx(request); + }, + + broadcastTx(request: BroadcastTxRequest): Promise { + return queryService.broadcastTx(request); + }, + + getTxsEvent(request: GetTxsEventRequest): Promise { + return queryService.getTxsEvent(request); + }, + + getBlockWithTxs(request: GetBlockWithTxsRequest): Promise { + return queryService.getBlockWithTxs(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/tx/v1beta1/service.ts b/packages/codegen/src/cosmos/tx/v1beta1/service.ts new file mode 100644 index 00000000..cd94819d --- /dev/null +++ b/packages/codegen/src/cosmos/tx/v1beta1/service.ts @@ -0,0 +1,916 @@ +import { Tx, TxSDKType } from "./tx"; +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../base/query/v1beta1/pagination"; +import { TxResponse, TxResponseSDKType, GasInfo, GasInfoSDKType, Result, ResultSDKType } from "../../base/abci/v1beta1/abci"; +import { BlockID, BlockIDSDKType } from "../../../tendermint/types/types"; +import { Block, BlockSDKType } from "../../../tendermint/types/block"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** OrderBy defines the sorting order */ + +export enum OrderBy { + /** ORDER_BY_UNSPECIFIED - ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case. */ + ORDER_BY_UNSPECIFIED = 0, + + /** ORDER_BY_ASC - ORDER_BY_ASC defines ascending order */ + ORDER_BY_ASC = 1, + + /** ORDER_BY_DESC - ORDER_BY_DESC defines descending order */ + ORDER_BY_DESC = 2, + UNRECOGNIZED = -1, +} +export const OrderBySDKType = OrderBy; +export function orderByFromJSON(object: any): OrderBy { + switch (object) { + case 0: + case "ORDER_BY_UNSPECIFIED": + return OrderBy.ORDER_BY_UNSPECIFIED; + + case 1: + case "ORDER_BY_ASC": + return OrderBy.ORDER_BY_ASC; + + case 2: + case "ORDER_BY_DESC": + return OrderBy.ORDER_BY_DESC; + + case -1: + case "UNRECOGNIZED": + default: + return OrderBy.UNRECOGNIZED; + } +} +export function orderByToJSON(object: OrderBy): string { + switch (object) { + case OrderBy.ORDER_BY_UNSPECIFIED: + return "ORDER_BY_UNSPECIFIED"; + + case OrderBy.ORDER_BY_ASC: + return "ORDER_BY_ASC"; + + case OrderBy.ORDER_BY_DESC: + return "ORDER_BY_DESC"; + + case OrderBy.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. */ + +export enum BroadcastMode { + /** BROADCAST_MODE_UNSPECIFIED - zero-value for mode ordering */ + BROADCAST_MODE_UNSPECIFIED = 0, + + /** + * BROADCAST_MODE_BLOCK - BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + * the tx to be committed in a block. + */ + BROADCAST_MODE_BLOCK = 1, + + /** + * BROADCAST_MODE_SYNC - BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + * a CheckTx execution response only. + */ + BROADCAST_MODE_SYNC = 2, + + /** + * BROADCAST_MODE_ASYNC - BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + * immediately. 
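A sketch of wiring the `createRpcQueryExtension` defined above onto a @cosmjs/stargate QueryClient; `Tendermint34Client.connect` and `QueryClient.withExtensions` are standard cosmjs calls, while the RPC URL and the tx bytes are placeholders.

import { QueryClient } from "@cosmjs/stargate";
import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { createRpcQueryExtension } from "./service.rpc.Service";

async function simulateSketch(txBytes: Uint8Array) {
  const tmClient = await Tendermint34Client.connect("https://rpc.example.org");
  // withExtensions merges the methods returned by the extension onto the query client.
  const client = QueryClient.withExtensions(tmClient, createRpcQueryExtension);

  const { gasInfo } = await client.simulate({ txBytes });
  return gasInfo?.gasUsed;
}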
+ */ + BROADCAST_MODE_ASYNC = 3, + UNRECOGNIZED = -1, +} +export const BroadcastModeSDKType = BroadcastMode; +export function broadcastModeFromJSON(object: any): BroadcastMode { + switch (object) { + case 0: + case "BROADCAST_MODE_UNSPECIFIED": + return BroadcastMode.BROADCAST_MODE_UNSPECIFIED; + + case 1: + case "BROADCAST_MODE_BLOCK": + return BroadcastMode.BROADCAST_MODE_BLOCK; + + case 2: + case "BROADCAST_MODE_SYNC": + return BroadcastMode.BROADCAST_MODE_SYNC; + + case 3: + case "BROADCAST_MODE_ASYNC": + return BroadcastMode.BROADCAST_MODE_ASYNC; + + case -1: + case "UNRECOGNIZED": + default: + return BroadcastMode.UNRECOGNIZED; + } +} +export function broadcastModeToJSON(object: BroadcastMode): string { + switch (object) { + case BroadcastMode.BROADCAST_MODE_UNSPECIFIED: + return "BROADCAST_MODE_UNSPECIFIED"; + + case BroadcastMode.BROADCAST_MODE_BLOCK: + return "BROADCAST_MODE_BLOCK"; + + case BroadcastMode.BROADCAST_MODE_SYNC: + return "BROADCAST_MODE_SYNC"; + + case BroadcastMode.BROADCAST_MODE_ASYNC: + return "BROADCAST_MODE_ASYNC"; + + case BroadcastMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * GetTxsEventRequest is the request type for the Service.TxsByEvents + * RPC method. + */ + +export interface GetTxsEventRequest { + /** events is the list of transaction event type. */ + events: string[]; + /** pagination defines a pagination for the request. */ + + pagination?: PageRequest; + orderBy: OrderBy; +} +/** + * GetTxsEventRequest is the request type for the Service.TxsByEvents + * RPC method. + */ + +export interface GetTxsEventRequestSDKType { + events: string[]; + pagination?: PageRequestSDKType; + order_by: OrderBy; +} +/** + * GetTxsEventResponse is the response type for the Service.TxsByEvents + * RPC method. + */ + +export interface GetTxsEventResponse { + /** txs is the list of queried transactions. */ + txs: Tx[]; + /** tx_responses is the list of queried TxResponses. */ + + txResponses: TxResponse[]; + /** pagination defines a pagination for the response. */ + + pagination?: PageResponse; +} +/** + * GetTxsEventResponse is the response type for the Service.TxsByEvents + * RPC method. + */ + +export interface GetTxsEventResponseSDKType { + txs: TxSDKType[]; + tx_responses: TxResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * BroadcastTxRequest is the request type for the Service.BroadcastTxRequest + * RPC method. + */ + +export interface BroadcastTxRequest { + /** tx_bytes is the raw transaction. */ + txBytes: Uint8Array; + mode: BroadcastMode; +} +/** + * BroadcastTxRequest is the request type for the Service.BroadcastTxRequest + * RPC method. + */ + +export interface BroadcastTxRequestSDKType { + tx_bytes: Uint8Array; + mode: BroadcastMode; +} +/** + * BroadcastTxResponse is the response type for the + * Service.BroadcastTx method. + */ + +export interface BroadcastTxResponse { + /** tx_response is the queried TxResponses. */ + txResponse?: TxResponse; +} +/** + * BroadcastTxResponse is the response type for the + * Service.BroadcastTx method. + */ + +export interface BroadcastTxResponseSDKType { + tx_response?: TxResponseSDKType; +} +/** + * SimulateRequest is the request type for the Service.Simulate + * RPC method. + */ + +export interface SimulateRequest { + /** + * tx is the transaction to simulate. + * Deprecated. Send raw tx bytes instead. + */ + + /** @deprecated */ + tx?: Tx; + /** + * tx_bytes is the raw transaction. 
+ * + * Since: cosmos-sdk 0.43 + */ + + txBytes: Uint8Array; +} +/** + * SimulateRequest is the request type for the Service.Simulate + * RPC method. + */ + +export interface SimulateRequestSDKType { + /** @deprecated */ + tx?: TxSDKType; + tx_bytes: Uint8Array; +} +/** + * SimulateResponse is the response type for the + * Service.SimulateRPC method. + */ + +export interface SimulateResponse { + /** gas_info is the information about gas used in the simulation. */ + gasInfo?: GasInfo; + /** result is the result of the simulation. */ + + result?: Result; +} +/** + * SimulateResponse is the response type for the + * Service.SimulateRPC method. + */ + +export interface SimulateResponseSDKType { + gas_info?: GasInfoSDKType; + result?: ResultSDKType; +} +/** + * GetTxRequest is the request type for the Service.GetTx + * RPC method. + */ + +export interface GetTxRequest { + /** hash is the tx hash to query, encoded as a hex string. */ + hash: string; +} +/** + * GetTxRequest is the request type for the Service.GetTx + * RPC method. + */ + +export interface GetTxRequestSDKType { + hash: string; +} +/** GetTxResponse is the response type for the Service.GetTx method. */ + +export interface GetTxResponse { + /** tx is the queried transaction. */ + tx?: Tx; + /** tx_response is the queried TxResponses. */ + + txResponse?: TxResponse; +} +/** GetTxResponse is the response type for the Service.GetTx method. */ + +export interface GetTxResponseSDKType { + tx?: TxSDKType; + tx_response?: TxResponseSDKType; +} +/** + * GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs + * RPC method. + * + * Since: cosmos-sdk 0.45.2 + */ + +export interface GetBlockWithTxsRequest { + /** height is the height of the block to query. */ + height: Long; + /** pagination defines a pagination for the request. */ + + pagination?: PageRequest; +} +/** + * GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs + * RPC method. + * + * Since: cosmos-sdk 0.45.2 + */ + +export interface GetBlockWithTxsRequestSDKType { + height: Long; + pagination?: PageRequestSDKType; +} +/** + * GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + * + * Since: cosmos-sdk 0.45.2 + */ + +export interface GetBlockWithTxsResponse { + /** txs are the transactions in the block. */ + txs: Tx[]; + blockId?: BlockID; + block?: Block; + /** pagination defines a pagination for the response. */ + + pagination?: PageResponse; +} +/** + * GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + * + * Since: cosmos-sdk 0.45.2 + */ + +export interface GetBlockWithTxsResponseSDKType { + txs: TxSDKType[]; + block_id?: BlockIDSDKType; + block?: BlockSDKType; + pagination?: PageResponseSDKType; +} + +function createBaseGetTxsEventRequest(): GetTxsEventRequest { + return { + events: [], + pagination: undefined, + orderBy: 0 + }; +} + +export const GetTxsEventRequest = { + encode(message: GetTxsEventRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.events) { + writer.uint32(10).string(v!); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.orderBy !== 0) { + writer.uint32(24).int32(message.orderBy); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTxsEventRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.events.push(reader.string()); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + case 3: + message.orderBy = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetTxsEventRequest { + const message = createBaseGetTxsEventRequest(); + message.events = object.events?.map(e => e) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + message.orderBy = object.orderBy ?? 0; + return message; + } + +}; + +function createBaseGetTxsEventResponse(): GetTxsEventResponse { + return { + txs: [], + txResponses: [], + pagination: undefined + }; +} + +export const GetTxsEventResponse = { + encode(message: GetTxsEventResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + Tx.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.txResponses) { + TxResponse.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxsEventResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txs.push(Tx.decode(reader, reader.uint32())); + break; + + case 2: + message.txResponses.push(TxResponse.decode(reader, reader.uint32())); + break; + + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetTxsEventResponse { + const message = createBaseGetTxsEventResponse(); + message.txs = object.txs?.map(e => Tx.fromPartial(e)) || []; + message.txResponses = object.txResponses?.map(e => TxResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseBroadcastTxRequest(): BroadcastTxRequest { + return { + txBytes: new Uint8Array(), + mode: 0 + }; +} + +export const BroadcastTxRequest = { + encode(message: BroadcastTxRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txBytes.length !== 0) { + writer.uint32(10).bytes(message.txBytes); + } + + if (message.mode !== 0) { + writer.uint32(16).int32(message.mode); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBroadcastTxRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txBytes = reader.bytes(); + break; + + case 2: + message.mode = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BroadcastTxRequest { + const message = createBaseBroadcastTxRequest(); + message.txBytes = object.txBytes ?? new Uint8Array(); + message.mode = object.mode ?? 0; + return message; + } + +}; + +function createBaseBroadcastTxResponse(): BroadcastTxResponse { + return { + txResponse: undefined + }; +} + +export const BroadcastTxResponse = { + encode(message: BroadcastTxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txResponse !== undefined) { + TxResponse.encode(message.txResponse, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBroadcastTxResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txResponse = TxResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BroadcastTxResponse { + const message = createBaseBroadcastTxResponse(); + message.txResponse = object.txResponse !== undefined && object.txResponse !== null ? TxResponse.fromPartial(object.txResponse) : undefined; + return message; + } + +}; + +function createBaseSimulateRequest(): SimulateRequest { + return { + tx: undefined, + txBytes: new Uint8Array() + }; +} + +export const SimulateRequest = { + encode(message: SimulateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + + if (message.txBytes.length !== 0) { + writer.uint32(18).bytes(message.txBytes); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimulateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + + case 2: + message.txBytes = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SimulateRequest { + const message = createBaseSimulateRequest(); + message.tx = object.tx !== undefined && object.tx !== null ? Tx.fromPartial(object.tx) : undefined; + message.txBytes = object.txBytes ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseSimulateResponse(): SimulateResponse { + return { + gasInfo: undefined, + result: undefined + }; +} + +export const SimulateResponse = { + encode(message: SimulateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.gasInfo !== undefined) { + GasInfo.encode(message.gasInfo, writer.uint32(10).fork()).ldelim(); + } + + if (message.result !== undefined) { + Result.encode(message.result, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimulateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.gasInfo = GasInfo.decode(reader, reader.uint32()); + break; + + case 2: + message.result = Result.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SimulateResponse { + const message = createBaseSimulateResponse(); + message.gasInfo = object.gasInfo !== undefined && object.gasInfo !== null ? GasInfo.fromPartial(object.gasInfo) : undefined; + message.result = object.result !== undefined && object.result !== null ? Result.fromPartial(object.result) : undefined; + return message; + } + +}; + +function createBaseGetTxRequest(): GetTxRequest { + return { + hash: "" + }; +} + +export const GetTxRequest = { + encode(message: GetTxRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash !== "") { + writer.uint32(10).string(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetTxRequest { + const message = createBaseGetTxRequest(); + message.hash = object.hash ?? ""; + return message; + } + +}; + +function createBaseGetTxResponse(): GetTxResponse { + return { + tx: undefined, + txResponse: undefined + }; +} + +export const GetTxResponse = { + encode(message: GetTxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + + if (message.txResponse !== undefined) { + TxResponse.encode(message.txResponse, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTxResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + + case 2: + message.txResponse = TxResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetTxResponse { + const message = createBaseGetTxResponse(); + message.tx = object.tx !== undefined && object.tx !== null ? Tx.fromPartial(object.tx) : undefined; + message.txResponse = object.txResponse !== undefined && object.txResponse !== null ? TxResponse.fromPartial(object.txResponse) : undefined; + return message; + } + +}; + +function createBaseGetBlockWithTxsRequest(): GetBlockWithTxsRequest { + return { + height: Long.ZERO, + pagination: undefined + }; +} + +export const GetBlockWithTxsRequest = { + encode(message: GetBlockWithTxsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockWithTxsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockWithTxsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetBlockWithTxsRequest { + const message = createBaseGetBlockWithTxsRequest(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseGetBlockWithTxsResponse(): GetBlockWithTxsResponse { + return { + txs: [], + blockId: undefined, + block: undefined, + pagination: undefined + }; +} + +export const GetBlockWithTxsResponse = { + encode(message: GetBlockWithTxsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + Tx.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(18).fork()).ldelim(); + } + + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(26).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockWithTxsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetBlockWithTxsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txs.push(Tx.decode(reader, reader.uint32())); + break; + + case 2: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 3: + message.block = Block.decode(reader, reader.uint32()); + break; + + case 4: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GetBlockWithTxsResponse { + const message = createBaseGetBlockWithTxsResponse(); + message.txs = object.txs?.map(e => Tx.fromPartial(e)) || []; + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.block = object.block !== undefined && object.block !== null ? Block.fromPartial(object.block) : undefined; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/tx/v1beta1/tx.ts b/packages/codegen/src/cosmos/tx/v1beta1/tx.ts new file mode 100644 index 00000000..a2aa5acd --- /dev/null +++ b/packages/codegen/src/cosmos/tx/v1beta1/tx.ts @@ -0,0 +1,1319 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { SignMode } from "../signing/v1beta1/signing"; +import { CompactBitArray, CompactBitArraySDKType } from "../../crypto/multisig/v1beta1/multisig"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** Tx is the standard type used for broadcasting transactions. */ + +export interface Tx { + /** body is the processable content of the transaction */ + body?: TxBody; + /** + * auth_info is the authorization related content of the transaction, + * specifically signers, signer modes and fee + */ + + authInfo?: AuthInfo; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + + signatures: Uint8Array[]; +} +/** Tx is the standard type used for broadcasting transactions. */ + +export interface TxSDKType { + body?: TxBodySDKType; + auth_info?: AuthInfoSDKType; + signatures: Uint8Array[]; +} +/** + * TxRaw is a variant of Tx that pins the signer's exact binary representation + * of body and auth_info. This is used for signing, broadcasting and + * verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and + * the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used + * as the transaction ID. + */ + +export interface TxRaw { + /** + * body_bytes is a protobuf serialization of a TxBody that matches the + * representation in SignDoc. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in SignDoc. + */ + + authInfoBytes: Uint8Array; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. 
+ */ + + signatures: Uint8Array[]; +} +/** + * TxRaw is a variant of Tx that pins the signer's exact binary representation + * of body and auth_info. This is used for signing, broadcasting and + * verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and + * the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used + * as the transaction ID. + */ + +export interface TxRawSDKType { + body_bytes: Uint8Array; + auth_info_bytes: Uint8Array; + signatures: Uint8Array[]; +} +/** SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. */ + +export interface SignDoc { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in TxRaw. + */ + + authInfoBytes: Uint8Array; + /** + * chain_id is the unique identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker + */ + + chainId: string; + /** account_number is the account number of the account in state */ + + accountNumber: Long; +} +/** SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. */ + +export interface SignDocSDKType { + body_bytes: Uint8Array; + auth_info_bytes: Uint8Array; + chain_id: string; + account_number: Long; +} +/** + * SignDocDirectAux is the type used for generating sign bytes for + * SIGN_MODE_DIRECT_AUX. + * + * Since: cosmos-sdk 0.46 + */ + +export interface SignDocDirectAux { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** public_key is the public key of the signing account. */ + + publicKey?: Any; + /** + * chain_id is the identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker. + */ + + chainId: string; + /** account_number is the account number of the account in state. */ + + accountNumber: Long; + /** sequence is the sequence number of the signing account. */ + + sequence: Long; + /** + * Tip is the optional tip used for meta-transactions. It should be left + * empty if the signer is not the tipper for this transaction. + */ + + tip?: Tip; +} +/** + * SignDocDirectAux is the type used for generating sign bytes for + * SIGN_MODE_DIRECT_AUX. + * + * Since: cosmos-sdk 0.46 + */ + +export interface SignDocDirectAuxSDKType { + body_bytes: Uint8Array; + public_key?: AnySDKType; + chain_id: string; + account_number: Long; + sequence: Long; + tip?: TipSDKType; +} +/** TxBody is the body of a transaction that all signers sign over. */ + +export interface TxBody { + /** + * messages is a list of messages to be executed. The required signers of + * those messages define the number and order of elements in AuthInfo's + * signer_infos and Tx's signatures. Each required signer address is added to + * the list only the first time it occurs. + * By convention, the first required signer (usually from the first message) + * is referred to as the primary signer and pays the fee for the whole + * transaction. + */ + messages: Any[]; + /** + * memo is any arbitrary note/comment to be added to the transaction. + * WARNING: in clients, any publicly exposed text should not be called memo, + * but should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122). 
+ */ + + memo: string; + /** + * timeout is the block height after which this transaction will not + * be processed by the chain + */ + + timeoutHeight: Long; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, the transaction will be rejected + */ + + extensionOptions: Any[]; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, they will be ignored + */ + + nonCriticalExtensionOptions: Any[]; +} +/** TxBody is the body of a transaction that all signers sign over. */ + +export interface TxBodySDKType { + messages: AnySDKType[]; + memo: string; + timeout_height: Long; + extension_options: AnySDKType[]; + non_critical_extension_options: AnySDKType[]; +} +/** + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. + */ + +export interface AuthInfo { + /** + * signer_infos defines the signing modes for the required signers. The number + * and order of elements must match the required signers from TxBody's + * messages. The first element is the primary signer and the one which pays + * the fee. + */ + signerInfos: SignerInfo[]; + /** + * Fee is the fee and gas limit for the transaction. The first signer is the + * primary signer and the one which pays the fee. The fee can be calculated + * based on the cost of evaluating the body and doing signature verification + * of the signers. This can be estimated via simulation. + */ + + fee?: Fee; + /** + * Tip is the optional tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ + + tip?: Tip; +} +/** + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. + */ + +export interface AuthInfoSDKType { + signer_infos: SignerInfoSDKType[]; + fee?: FeeSDKType; + tip?: TipSDKType; +} +/** + * SignerInfo describes the public key and signing mode of a single top-level + * signer. + */ + +export interface SignerInfo { + /** + * public_key is the public key of the signer. It is optional for accounts + * that already exist in state. If unset, the verifier can use the required \ + * signer address for this position and lookup the public key. + */ + publicKey?: Any; + /** + * mode_info describes the signing mode of the signer and is a nested + * structure to support nested multisig pubkey's + */ + + modeInfo?: ModeInfo; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to + * prevent replay attacks. + */ + + sequence: Long; +} +/** + * SignerInfo describes the public key and signing mode of a single top-level + * signer. + */ + +export interface SignerInfoSDKType { + public_key?: AnySDKType; + mode_info?: ModeInfoSDKType; + sequence: Long; +} +/** ModeInfo describes the signing mode of a single or nested multisig signer. */ + +export interface ModeInfo { + /** single represents a single signer */ + single?: ModeInfo_Single; + /** multi represents a nested multisig signer */ + + multi?: ModeInfo_Multi; +} +/** ModeInfo describes the signing mode of a single or nested multisig signer. */ + +export interface ModeInfoSDKType { + single?: ModeInfo_SingleSDKType; + multi?: ModeInfo_MultiSDKType; +} +/** + * Single is the mode info for a single signer. 
It is structured as a message + * to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + * future + */ + +export interface ModeInfo_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; +} +/** + * Single is the mode info for a single signer. It is structured as a message + * to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + * future + */ + +export interface ModeInfo_SingleSDKType { + mode: SignMode; +} +/** Multi is the mode info for a multisig public key */ + +export interface ModeInfo_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray?: CompactBitArray; + /** + * mode_infos is the corresponding modes of the signers of the multisig + * which could include nested multisig public keys + */ + + modeInfos: ModeInfo[]; +} +/** Multi is the mode info for a multisig public key */ + +export interface ModeInfo_MultiSDKType { + bitarray?: CompactBitArraySDKType; + mode_infos: ModeInfoSDKType[]; +} +/** + * Fee includes the amount of coins paid in fees and the maximum + * gas to be used by the transaction. The ratio yields an effective "gasprice", + * which must be above some miminum to be accepted into the mempool. + */ + +export interface Fee { + /** amount is the amount of coins to be paid as a fee */ + amount: Coin[]; + /** + * gas_limit is the maximum gas that can be used in transaction processing + * before an out of gas error occurs + */ + + gasLimit: Long; + /** + * if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees. + * the payer must be a tx signer (and thus have signed this field in AuthInfo). + * setting this field does *not* change the ordering of required signers for the transaction. + */ + + payer: string; + /** + * if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used + * to pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does + * not support fee grants, this will fail + */ + + granter: string; +} +/** + * Fee includes the amount of coins paid in fees and the maximum + * gas to be used by the transaction. The ratio yields an effective "gasprice", + * which must be above some miminum to be accepted into the mempool. + */ + +export interface FeeSDKType { + amount: CoinSDKType[]; + gas_limit: Long; + payer: string; + granter: string; +} +/** + * Tip is the tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ + +export interface Tip { + /** amount is the amount of the tip */ + amount: Coin[]; + /** tipper is the address of the account paying for the tip */ + + tipper: string; +} +/** + * Tip is the tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ + +export interface TipSDKType { + amount: CoinSDKType[]; + tipper: string; +} +/** + * AuxSignerData is the intermediary format that an auxiliary signer (e.g. a + * tipper) builds and sends to the fee payer (who will build and broadcast the + * actual tx). AuxSignerData is not a valid tx in itself, and will be rejected + * by the node if sent directly as-is. + * + * Since: cosmos-sdk 0.46 + */ + +export interface AuxSignerData { + /** + * address is the bech32-encoded address of the auxiliary signer. If using + * AuxSignerData across different chains, the bech32 prefix of the target + * chain (where the final transaction is broadcasted) should be used. 
+ */ + address: string; + /** + * sign_doc is the SIGN_MOD_DIRECT_AUX sign doc that the auxiliary signer + * signs. Note: we use the same sign doc even if we're signing with + * LEGACY_AMINO_JSON. + */ + + signDoc?: SignDocDirectAux; + /** mode is the signing mode of the single signer */ + + mode: SignMode; + /** sig is the signature of the sign doc. */ + + sig: Uint8Array; +} +/** + * AuxSignerData is the intermediary format that an auxiliary signer (e.g. a + * tipper) builds and sends to the fee payer (who will build and broadcast the + * actual tx). AuxSignerData is not a valid tx in itself, and will be rejected + * by the node if sent directly as-is. + * + * Since: cosmos-sdk 0.46 + */ + +export interface AuxSignerDataSDKType { + address: string; + sign_doc?: SignDocDirectAuxSDKType; + mode: SignMode; + sig: Uint8Array; +} + +function createBaseTx(): Tx { + return { + body: undefined, + authInfo: undefined, + signatures: [] + }; +} + +export const Tx = { + encode(message: Tx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.body !== undefined) { + TxBody.encode(message.body, writer.uint32(10).fork()).ldelim(); + } + + if (message.authInfo !== undefined) { + AuthInfo.encode(message.authInfo, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.signatures) { + writer.uint32(26).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Tx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTx(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.body = TxBody.decode(reader, reader.uint32()); + break; + + case 2: + message.authInfo = AuthInfo.decode(reader, reader.uint32()); + break; + + case 3: + message.signatures.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Tx { + const message = createBaseTx(); + message.body = object.body !== undefined && object.body !== null ? TxBody.fromPartial(object.body) : undefined; + message.authInfo = object.authInfo !== undefined && object.authInfo !== null ? AuthInfo.fromPartial(object.authInfo) : undefined; + message.signatures = object.signatures?.map(e => e) || []; + return message; + } + +}; + +function createBaseTxRaw(): TxRaw { + return { + bodyBytes: new Uint8Array(), + authInfoBytes: new Uint8Array(), + signatures: [] + }; +} + +export const TxRaw = { + encode(message: TxRaw, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + + if (message.authInfoBytes.length !== 0) { + writer.uint32(18).bytes(message.authInfoBytes); + } + + for (const v of message.signatures) { + writer.uint32(26).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxRaw { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxRaw(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + + case 2: + message.authInfoBytes = reader.bytes(); + break; + + case 3: + message.signatures.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxRaw { + const message = createBaseTxRaw(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.authInfoBytes = object.authInfoBytes ?? new Uint8Array(); + message.signatures = object.signatures?.map(e => e) || []; + return message; + } + +}; + +function createBaseSignDoc(): SignDoc { + return { + bodyBytes: new Uint8Array(), + authInfoBytes: new Uint8Array(), + chainId: "", + accountNumber: Long.UZERO + }; +} + +export const SignDoc = { + encode(message: SignDoc, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + + if (message.authInfoBytes.length !== 0) { + writer.uint32(18).bytes(message.authInfoBytes); + } + + if (message.chainId !== "") { + writer.uint32(26).string(message.chainId); + } + + if (!message.accountNumber.isZero()) { + writer.uint32(32).uint64(message.accountNumber); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignDoc { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignDoc(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + + case 2: + message.authInfoBytes = reader.bytes(); + break; + + case 3: + message.chainId = reader.string(); + break; + + case 4: + message.accountNumber = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignDoc { + const message = createBaseSignDoc(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.authInfoBytes = object.authInfoBytes ?? new Uint8Array(); + message.chainId = object.chainId ?? ""; + message.accountNumber = object.accountNumber !== undefined && object.accountNumber !== null ? 
Long.fromValue(object.accountNumber) : Long.UZERO; + return message; + } + +}; + +function createBaseSignDocDirectAux(): SignDocDirectAux { + return { + bodyBytes: new Uint8Array(), + publicKey: undefined, + chainId: "", + accountNumber: Long.UZERO, + sequence: Long.UZERO, + tip: undefined + }; +} + +export const SignDocDirectAux = { + encode(message: SignDocDirectAux, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(18).fork()).ldelim(); + } + + if (message.chainId !== "") { + writer.uint32(26).string(message.chainId); + } + + if (!message.accountNumber.isZero()) { + writer.uint32(32).uint64(message.accountNumber); + } + + if (!message.sequence.isZero()) { + writer.uint32(40).uint64(message.sequence); + } + + if (message.tip !== undefined) { + Tip.encode(message.tip, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignDocDirectAux { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignDocDirectAux(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + + case 2: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.chainId = reader.string(); + break; + + case 4: + message.accountNumber = (reader.uint64() as Long); + break; + + case 5: + message.sequence = (reader.uint64() as Long); + break; + + case 6: + message.tip = Tip.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignDocDirectAux { + const message = createBaseSignDocDirectAux(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.publicKey = object.publicKey !== undefined && object.publicKey !== null ? Any.fromPartial(object.publicKey) : undefined; + message.chainId = object.chainId ?? ""; + message.accountNumber = object.accountNumber !== undefined && object.accountNumber !== null ? Long.fromValue(object.accountNumber) : Long.UZERO; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.tip = object.tip !== undefined && object.tip !== null ? Tip.fromPartial(object.tip) : undefined; + return message; + } + +}; + +function createBaseTxBody(): TxBody { + return { + messages: [], + memo: "", + timeoutHeight: Long.UZERO, + extensionOptions: [], + nonCriticalExtensionOptions: [] + }; +} + +export const TxBody = { + encode(message: TxBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.messages) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.memo !== "") { + writer.uint32(18).string(message.memo); + } + + if (!message.timeoutHeight.isZero()) { + writer.uint32(24).uint64(message.timeoutHeight); + } + + for (const v of message.extensionOptions) { + Any.encode(v!, writer.uint32(8186).fork()).ldelim(); + } + + for (const v of message.nonCriticalExtensionOptions) { + Any.encode(v!, writer.uint32(16378).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxBody { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxBody(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + + case 2: + message.memo = reader.string(); + break; + + case 3: + message.timeoutHeight = (reader.uint64() as Long); + break; + + case 1023: + message.extensionOptions.push(Any.decode(reader, reader.uint32())); + break; + + case 2047: + message.nonCriticalExtensionOptions.push(Any.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxBody { + const message = createBaseTxBody(); + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + message.memo = object.memo ?? ""; + message.timeoutHeight = object.timeoutHeight !== undefined && object.timeoutHeight !== null ? Long.fromValue(object.timeoutHeight) : Long.UZERO; + message.extensionOptions = object.extensionOptions?.map(e => Any.fromPartial(e)) || []; + message.nonCriticalExtensionOptions = object.nonCriticalExtensionOptions?.map(e => Any.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAuthInfo(): AuthInfo { + return { + signerInfos: [], + fee: undefined, + tip: undefined + }; +} + +export const AuthInfo = { + encode(message: AuthInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signerInfos) { + SignerInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.fee !== undefined) { + Fee.encode(message.fee, writer.uint32(18).fork()).ldelim(); + } + + if (message.tip !== undefined) { + Tip.encode(message.tip, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuthInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signerInfos.push(SignerInfo.decode(reader, reader.uint32())); + break; + + case 2: + message.fee = Fee.decode(reader, reader.uint32()); + break; + + case 3: + message.tip = Tip.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AuthInfo { + const message = createBaseAuthInfo(); + message.signerInfos = object.signerInfos?.map(e => SignerInfo.fromPartial(e)) || []; + message.fee = object.fee !== undefined && object.fee !== null ? Fee.fromPartial(object.fee) : undefined; + message.tip = object.tip !== undefined && object.tip !== null ? 
Tip.fromPartial(object.tip) : undefined; + return message; + } + +}; + +function createBaseSignerInfo(): SignerInfo { + return { + publicKey: undefined, + modeInfo: undefined, + sequence: Long.UZERO + }; +} + +export const SignerInfo = { + encode(message: SignerInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.modeInfo !== undefined) { + ModeInfo.encode(message.modeInfo, writer.uint32(18).fork()).ldelim(); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignerInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignerInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.modeInfo = ModeInfo.decode(reader, reader.uint32()); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignerInfo { + const message = createBaseSignerInfo(); + message.publicKey = object.publicKey !== undefined && object.publicKey !== null ? Any.fromPartial(object.publicKey) : undefined; + message.modeInfo = object.modeInfo !== undefined && object.modeInfo !== null ? ModeInfo.fromPartial(object.modeInfo) : undefined; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseModeInfo(): ModeInfo { + return { + single: undefined, + multi: undefined + }; +} + +export const ModeInfo = { + encode(message: ModeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.single !== undefined) { + ModeInfo_Single.encode(message.single, writer.uint32(10).fork()).ldelim(); + } + + if (message.multi !== undefined) { + ModeInfo_Multi.encode(message.multi, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.single = ModeInfo_Single.decode(reader, reader.uint32()); + break; + + case 2: + message.multi = ModeInfo_Multi.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModeInfo { + const message = createBaseModeInfo(); + message.single = object.single !== undefined && object.single !== null ? ModeInfo_Single.fromPartial(object.single) : undefined; + message.multi = object.multi !== undefined && object.multi !== null ? 
ModeInfo_Multi.fromPartial(object.multi) : undefined; + return message; + } + +}; + +function createBaseModeInfo_Single(): ModeInfo_Single { + return { + mode: 0 + }; +} + +export const ModeInfo_Single = { + encode(message: ModeInfo_Single, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mode !== 0) { + writer.uint32(8).int32(message.mode); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Single { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo_Single(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.mode = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModeInfo_Single { + const message = createBaseModeInfo_Single(); + message.mode = object.mode ?? 0; + return message; + } + +}; + +function createBaseModeInfo_Multi(): ModeInfo_Multi { + return { + bitarray: undefined, + modeInfos: [] + }; +} + +export const ModeInfo_Multi = { + encode(message: ModeInfo_Multi, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bitarray !== undefined) { + CompactBitArray.encode(message.bitarray, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.modeInfos) { + ModeInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Multi { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo_Multi(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bitarray = CompactBitArray.decode(reader, reader.uint32()); + break; + + case 2: + message.modeInfos.push(ModeInfo.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModeInfo_Multi { + const message = createBaseModeInfo_Multi(); + message.bitarray = object.bitarray !== undefined && object.bitarray !== null ? CompactBitArray.fromPartial(object.bitarray) : undefined; + message.modeInfos = object.modeInfos?.map(e => ModeInfo.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseFee(): Fee { + return { + amount: [], + gasLimit: Long.UZERO, + payer: "", + granter: "" + }; +} + +export const Fee = { + encode(message: Fee, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (!message.gasLimit.isZero()) { + writer.uint32(16).uint64(message.gasLimit); + } + + if (message.payer !== "") { + writer.uint32(26).string(message.payer); + } + + if (message.granter !== "") { + writer.uint32(34).string(message.granter); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Fee { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFee(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.gasLimit = (reader.uint64() as Long); + break; + + case 3: + message.payer = reader.string(); + break; + + case 4: + message.granter = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Fee { + const message = createBaseFee(); + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + message.gasLimit = object.gasLimit !== undefined && object.gasLimit !== null ? Long.fromValue(object.gasLimit) : Long.UZERO; + message.payer = object.payer ?? ""; + message.granter = object.granter ?? ""; + return message; + } + +}; + +function createBaseTip(): Tip { + return { + amount: [], + tipper: "" + }; +} + +export const Tip = { + encode(message: Tip, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.tipper !== "") { + writer.uint32(18).string(message.tipper); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Tip { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTip(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + case 2: + message.tipper = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Tip { + const message = createBaseTip(); + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + message.tipper = object.tipper ?? ""; + return message; + } + +}; + +function createBaseAuxSignerData(): AuxSignerData { + return { + address: "", + signDoc: undefined, + mode: 0, + sig: new Uint8Array() + }; +} + +export const AuxSignerData = { + encode(message: AuxSignerData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.signDoc !== undefined) { + SignDocDirectAux.encode(message.signDoc, writer.uint32(18).fork()).ldelim(); + } + + if (message.mode !== 0) { + writer.uint32(24).int32(message.mode); + } + + if (message.sig.length !== 0) { + writer.uint32(34).bytes(message.sig); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuxSignerData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuxSignerData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.signDoc = SignDocDirectAux.decode(reader, reader.uint32()); + break; + + case 3: + message.mode = (reader.int32() as any); + break; + + case 4: + message.sig = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AuxSignerData { + const message = createBaseAuxSignerData(); + message.address = object.address ?? 
""; + message.signDoc = object.signDoc !== undefined && object.signDoc !== null ? SignDocDirectAux.fromPartial(object.signDoc) : undefined; + message.mode = object.mode ?? 0; + message.sig = object.sig ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/query.lcd.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/query.lcd.ts new file mode 100644 index 00000000..36a7cf66 --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/query.lcd.ts @@ -0,0 +1,69 @@ +import { LCDClient } from "@osmonauts/lcd"; +import { QueryCurrentPlanRequest, QueryCurrentPlanResponseSDKType, QueryAppliedPlanRequest, QueryAppliedPlanResponseSDKType, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponseSDKType, QueryModuleVersionsRequest, QueryModuleVersionsResponseSDKType, QueryAuthorityRequest, QueryAuthorityResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.currentPlan = this.currentPlan.bind(this); + this.appliedPlan = this.appliedPlan.bind(this); + this.upgradedConsensusState = this.upgradedConsensusState.bind(this); + this.moduleVersions = this.moduleVersions.bind(this); + this.authority = this.authority.bind(this); + } + /* CurrentPlan queries the current upgrade plan. */ + + + async currentPlan(_params: QueryCurrentPlanRequest = {}): Promise { + const endpoint = `cosmos/upgrade/v1beta1/current_plan`; + return await this.req.get(endpoint); + } + /* AppliedPlan queries a previously applied upgrade plan by its name. */ + + + async appliedPlan(params: QueryAppliedPlanRequest): Promise { + const endpoint = `cosmos/upgrade/v1beta1/applied_plan/${params.name}`; + return await this.req.get(endpoint); + } + /* UpgradedConsensusState queries the consensus state that will serve + as a trusted kernel for the next version of this chain. It will only be + stored at the last height of this chain. + UpgradedConsensusState RPC not supported with legacy querier + This rpc is deprecated now that IBC has its own replacement + (https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) */ + + + async upgradedConsensusState(params: QueryUpgradedConsensusStateRequest): Promise { + const endpoint = `cosmos/upgrade/v1beta1/upgraded_consensus_state/${params.lastHeight}`; + return await this.req.get(endpoint); + } + /* ModuleVersions queries the list of module versions from state. 
+ + Since: cosmos-sdk 0.43 */ + + + async moduleVersions(params: QueryModuleVersionsRequest): Promise<QueryModuleVersionsResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.moduleName !== "undefined") { + options.params.module_name = params.moduleName; + } + + const endpoint = `cosmos/upgrade/v1beta1/module_versions`; + return await this.req.get(endpoint, options); + } + /* Returns the account with authority to conduct upgrades */ + + + async authority(_params: QueryAuthorityRequest = {}): Promise<QueryAuthorityResponseSDKType> { + const endpoint = `cosmos/upgrade/v1beta1/authority`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/query.rpc.Query.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/query.rpc.Query.ts new file mode 100644 index 00000000..d2cea4ed --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/query.rpc.Query.ts @@ -0,0 +1,102 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryCurrentPlanRequest, QueryCurrentPlanResponse, QueryAppliedPlanRequest, QueryAppliedPlanResponse, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponse, QueryModuleVersionsRequest, QueryModuleVersionsResponse, QueryAuthorityRequest, QueryAuthorityResponse } from "./query"; +/** Query defines the gRPC upgrade querier service. */ + +export interface Query { + /** CurrentPlan queries the current upgrade plan. */ + currentPlan(request?: QueryCurrentPlanRequest): Promise<QueryCurrentPlanResponse>; + /** AppliedPlan queries a previously applied upgrade plan by its name. */ + + appliedPlan(request: QueryAppliedPlanRequest): Promise<QueryAppliedPlanResponse>; + /** + * UpgradedConsensusState queries the consensus state that will serve + * as a trusted kernel for the next version of this chain. It will only be + * stored at the last height of this chain. + * UpgradedConsensusState RPC not supported with legacy querier + * This rpc is deprecated now that IBC has its own replacement + * (https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) + */ + + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse>; + /** + * ModuleVersions queries the list of module versions from state. 
+ * + * Since: cosmos-sdk 0.43 + */ + + moduleVersions(request: QueryModuleVersionsRequest): Promise<QueryModuleVersionsResponse>; + /** Returns the account with authority to conduct upgrades */ + + authority(request?: QueryAuthorityRequest): Promise<QueryAuthorityResponse>; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.currentPlan = this.currentPlan.bind(this); + this.appliedPlan = this.appliedPlan.bind(this); + this.upgradedConsensusState = this.upgradedConsensusState.bind(this); + this.moduleVersions = this.moduleVersions.bind(this); + this.authority = this.authority.bind(this); + } + + currentPlan(request: QueryCurrentPlanRequest = {}): Promise<QueryCurrentPlanResponse> { + const data = QueryCurrentPlanRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "CurrentPlan", data); + return promise.then(data => QueryCurrentPlanResponse.decode(new _m0.Reader(data))); + } + + appliedPlan(request: QueryAppliedPlanRequest): Promise<QueryAppliedPlanResponse> { + const data = QueryAppliedPlanRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "AppliedPlan", data); + return promise.then(data => QueryAppliedPlanResponse.decode(new _m0.Reader(data))); + } + + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse> { + const data = QueryUpgradedConsensusStateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "UpgradedConsensusState", data); + return promise.then(data => QueryUpgradedConsensusStateResponse.decode(new _m0.Reader(data))); + } + + moduleVersions(request: QueryModuleVersionsRequest): Promise<QueryModuleVersionsResponse> { + const data = QueryModuleVersionsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "ModuleVersions", data); + return promise.then(data => QueryModuleVersionsResponse.decode(new _m0.Reader(data))); + } + + authority(request: QueryAuthorityRequest = {}): Promise<QueryAuthorityResponse> { + const data = QueryAuthorityRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "Authority", data); + return promise.then(data => QueryAuthorityResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + currentPlan(request?: QueryCurrentPlanRequest): Promise<QueryCurrentPlanResponse> { + return queryService.currentPlan(request); + }, + + appliedPlan(request: QueryAppliedPlanRequest): Promise<QueryAppliedPlanResponse> { + return queryService.appliedPlan(request); + }, + + upgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse> { + return queryService.upgradedConsensusState(request); + }, + + moduleVersions(request: QueryModuleVersionsRequest): Promise<QueryModuleVersionsResponse> { + return queryService.moduleVersions(request); + }, + + authority(request?: QueryAuthorityRequest): Promise<QueryAuthorityResponse> { + return queryService.authority(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/query.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/query.ts new file mode 100644 index 00000000..e676c1d2 --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/query.ts @@ -0,0 +1,617 @@ +import { Plan, PlanSDKType, ModuleVersion, ModuleVersionSDKType } from "./upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC + * 
method. + */ + +export interface QueryCurrentPlanRequest {} +/** + * QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC + * method. + */ + +export interface QueryCurrentPlanRequestSDKType {} +/** + * QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC + * method. + */ + +export interface QueryCurrentPlanResponse { + /** plan is the current upgrade plan. */ + plan?: Plan; +} +/** + * QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC + * method. + */ + +export interface QueryCurrentPlanResponseSDKType { + plan?: PlanSDKType; +} +/** + * QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC + * method. + */ + +export interface QueryAppliedPlanRequest { + /** name is the name of the applied plan to query for. */ + name: string; +} +/** + * QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC + * method. + */ + +export interface QueryAppliedPlanRequestSDKType { + name: string; +} +/** + * QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC + * method. + */ + +export interface QueryAppliedPlanResponse { + /** height is the block height at which the plan was applied. */ + height: Long; +} +/** + * QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC + * method. + */ + +export interface QueryAppliedPlanResponseSDKType { + height: Long; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState + * RPC method. + */ + +/** @deprecated */ + +export interface QueryUpgradedConsensusStateRequest { + /** + * last height of the current chain must be sent in request + * as this is the height under which next consensus state is stored + */ + lastHeight: Long; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState + * RPC method. + */ + +/** @deprecated */ + +export interface QueryUpgradedConsensusStateRequestSDKType { + last_height: Long; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState + * RPC method. + */ + +/** @deprecated */ + +export interface QueryUpgradedConsensusStateResponse { + /** Since: cosmos-sdk 0.43 */ + upgradedConsensusState: Uint8Array; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState + * RPC method. + */ + +/** @deprecated */ + +export interface QueryUpgradedConsensusStateResponseSDKType { + upgraded_consensus_state: Uint8Array; +} +/** + * QueryModuleVersionsRequest is the request type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryModuleVersionsRequest { + /** + * module_name is a field to query a specific module + * consensus version from state. Leaving this empty will + * fetch the full list of module versions from state + */ + moduleName: string; +} +/** + * QueryModuleVersionsRequest is the request type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryModuleVersionsRequestSDKType { + module_name: string; +} +/** + * QueryModuleVersionsResponse is the response type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryModuleVersionsResponse { + /** module_versions is a list of module names with their consensus versions. 
*/ + moduleVersions: ModuleVersion[]; +} +/** + * QueryModuleVersionsResponse is the response type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ + +export interface QueryModuleVersionsResponseSDKType { + module_versions: ModuleVersionSDKType[]; +} +/** + * QueryAuthorityRequest is the request type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ + +export interface QueryAuthorityRequest {} +/** + * QueryAuthorityRequest is the request type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ + +export interface QueryAuthorityRequestSDKType {} +/** + * QueryAuthorityResponse is the response type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ + +export interface QueryAuthorityResponse { + address: string; +} +/** + * QueryAuthorityResponse is the response type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ + +export interface QueryAuthorityResponseSDKType { + address: string; +} + +function createBaseQueryCurrentPlanRequest(): QueryCurrentPlanRequest { + return {}; +} + +export const QueryCurrentPlanRequest = { + encode(_: QueryCurrentPlanRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCurrentPlanRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryCurrentPlanRequest { + const message = createBaseQueryCurrentPlanRequest(); + return message; + } + +}; + +function createBaseQueryCurrentPlanResponse(): QueryCurrentPlanResponse { + return { + plan: undefined + }; +} + +export const QueryCurrentPlanResponse = { + encode(message: QueryCurrentPlanResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCurrentPlanResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.plan = Plan.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCurrentPlanResponse { + const message = createBaseQueryCurrentPlanResponse(); + message.plan = object.plan !== undefined && object.plan !== null ? Plan.fromPartial(object.plan) : undefined; + return message; + } + +}; + +function createBaseQueryAppliedPlanRequest(): QueryAppliedPlanRequest { + return { + name: "" + }; +} + +export const QueryAppliedPlanRequest = { + encode(message: QueryAppliedPlanRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAppliedPlanRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAppliedPlanRequest { + const message = createBaseQueryAppliedPlanRequest(); + message.name = object.name ?? ""; + return message; + } + +}; + +function createBaseQueryAppliedPlanResponse(): QueryAppliedPlanResponse { + return { + height: Long.ZERO + }; +} + +export const QueryAppliedPlanResponse = { + encode(message: QueryAppliedPlanResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAppliedPlanResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAppliedPlanResponse { + const message = createBaseQueryAppliedPlanResponse(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + return message; + } + +}; + +function createBaseQueryUpgradedConsensusStateRequest(): QueryUpgradedConsensusStateRequest { + return { + lastHeight: Long.ZERO + }; +} + +export const QueryUpgradedConsensusStateRequest = { + encode(message: QueryUpgradedConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.lastHeight.isZero()) { + writer.uint32(8).int64(message.lastHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.lastHeight = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUpgradedConsensusStateRequest { + const message = createBaseQueryUpgradedConsensusStateRequest(); + message.lastHeight = object.lastHeight !== undefined && object.lastHeight !== null ? Long.fromValue(object.lastHeight) : Long.ZERO; + return message; + } + +}; + +function createBaseQueryUpgradedConsensusStateResponse(): QueryUpgradedConsensusStateResponse { + return { + upgradedConsensusState: new Uint8Array() + }; +} + +export const QueryUpgradedConsensusStateResponse = { + encode(message: QueryUpgradedConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.upgradedConsensusState.length !== 0) { + writer.uint32(18).bytes(message.upgradedConsensusState); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.upgradedConsensusState = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUpgradedConsensusStateResponse { + const message = createBaseQueryUpgradedConsensusStateResponse(); + message.upgradedConsensusState = object.upgradedConsensusState ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryModuleVersionsRequest(): QueryModuleVersionsRequest { + return { + moduleName: "" + }; +} + +export const QueryModuleVersionsRequest = { + encode(message: QueryModuleVersionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moduleName !== "") { + writer.uint32(10).string(message.moduleName); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleVersionsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.moduleName = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryModuleVersionsRequest { + const message = createBaseQueryModuleVersionsRequest(); + message.moduleName = object.moduleName ?? ""; + return message; + } + +}; + +function createBaseQueryModuleVersionsResponse(): QueryModuleVersionsResponse { + return { + moduleVersions: [] + }; +} + +export const QueryModuleVersionsResponse = { + encode(message: QueryModuleVersionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.moduleVersions) { + ModuleVersion.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleVersionsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.moduleVersions.push(ModuleVersion.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryModuleVersionsResponse { + const message = createBaseQueryModuleVersionsResponse(); + message.moduleVersions = object.moduleVersions?.map(e => ModuleVersion.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseQueryAuthorityRequest(): QueryAuthorityRequest { + return {}; +} + +export const QueryAuthorityRequest = { + encode(_: QueryAuthorityRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAuthorityRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAuthorityRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryAuthorityRequest { + const message = createBaseQueryAuthorityRequest(); + return message; + } + +}; + +function createBaseQueryAuthorityResponse(): QueryAuthorityResponse { + return { + address: "" + }; +} + +export const QueryAuthorityResponse = { + encode(message: QueryAuthorityResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAuthorityResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAuthorityResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAuthorityResponse { + const message = createBaseQueryAuthorityResponse(); + message.address = object.address ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..901bdbd0 --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,43 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgSoftwareUpgrade, MsgSoftwareUpgradeResponse, MsgCancelUpgrade, MsgCancelUpgradeResponse } from "./tx"; +/** Msg defines the upgrade Msg service. */ + +export interface Msg { + /** + * SoftwareUpgrade is a governance operation for initiating a software upgrade. + * + * Since: cosmos-sdk 0.46 + */ + softwareUpgrade(request: MsgSoftwareUpgrade): Promise; + /** + * CancelUpgrade is a governance operation for cancelling a previously + * approvid software upgrade. 
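These query codecs all follow the same protobufjs pattern: encode writes fields into an _m0.Writer, decode walks the tag stream, and fromPartial fills defaults. A minimal round-trip sketch; the relative import path and the plan name are illustrative only and assume the snippet sits next to the generated query.ts:

import { QueryAppliedPlanRequest, QueryAppliedPlanResponse } from "./query"; // illustrative path
import { Long } from "../../../helpers";

// Serialize a request and decode it back.
const req = QueryAppliedPlanRequest.fromPartial({ name: "v0.46-upgrade" }); // example plan name
const bytes = QueryAppliedPlanRequest.encode(req).finish(); // Uint8Array as sent on the wire
console.log(QueryAppliedPlanRequest.decode(bytes).name); // "v0.46-upgrade"

// Responses carry the applied height as a Long.
const res = QueryAppliedPlanResponse.fromPartial({ height: Long.fromValue(123) });
const decoded = QueryAppliedPlanResponse.decode(QueryAppliedPlanResponse.encode(res).finish());
console.log(decoded.height.toString()); // "123"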
+ * + * Since: cosmos-sdk 0.46 + */ + + cancelUpgrade(request: MsgCancelUpgrade): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.softwareUpgrade = this.softwareUpgrade.bind(this); + this.cancelUpgrade = this.cancelUpgrade.bind(this); + } + + softwareUpgrade(request: MsgSoftwareUpgrade): Promise { + const data = MsgSoftwareUpgrade.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Msg", "SoftwareUpgrade", data); + return promise.then(data => MsgSoftwareUpgradeResponse.decode(new _m0.Reader(data))); + } + + cancelUpgrade(request: MsgCancelUpgrade): Promise { + const data = MsgCancelUpgrade.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Msg", "CancelUpgrade", data); + return promise.then(data => MsgCancelUpgradeResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/tx.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/tx.ts new file mode 100644 index 00000000..def2c4c4 --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/tx.ts @@ -0,0 +1,241 @@ +import { Plan, PlanSDKType } from "./upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../helpers"; +/** + * MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgSoftwareUpgrade { + /** authority is the address of the governance account. */ + authority: string; + /** plan is the upgrade plan. */ + + plan?: Plan; +} +/** + * MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgSoftwareUpgradeSDKType { + authority: string; + plan?: PlanSDKType; +} +/** + * MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgSoftwareUpgradeResponse {} +/** + * MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgSoftwareUpgradeResponseSDKType {} +/** + * MsgCancelUpgrade is the Msg/CancelUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgCancelUpgrade { + /** authority is the address of the governance account. */ + authority: string; +} +/** + * MsgCancelUpgrade is the Msg/CancelUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgCancelUpgradeSDKType { + authority: string; +} +/** + * MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgCancelUpgradeResponse {} +/** + * MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ + +export interface MsgCancelUpgradeResponseSDKType {} + +function createBaseMsgSoftwareUpgrade(): MsgSoftwareUpgrade { + return { + authority: "", + plan: undefined + }; +} + +export const MsgSoftwareUpgrade = { + encode(message: MsgSoftwareUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
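MsgClientImpl only depends on the Rpc interface from helpers (a single request(service, method, data) call resolving to raw response bytes), so it can be exercised against any transport. A sketch with an in-memory stub; the import paths and the authority address are illustrative, and a real setup would forward request to a node:

import { MsgClientImpl } from "./tx.rpc.msg"; // illustrative: as if this sketch sat next to the generated file
import { MsgCancelUpgrade } from "./tx";
import { Rpc } from "../../../helpers";

// Stub transport: logs the call and returns empty bytes,
// which decode to an empty MsgCancelUpgradeResponse.
const rpc: Rpc = {
  request: async (service, method, data) => {
    console.log(`${service}/${method} (${data.length} bytes)`);
    return new Uint8Array();
  }
};

async function cancelPendingUpgrade() {
  const msgClient = new MsgClientImpl(rpc);
  // authority is a placeholder; on a real chain it is the gov module account.
  await msgClient.cancelUpgrade(MsgCancelUpgrade.fromPartial({ authority: "cosmos1..." }));
}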
reader.len : reader.pos + length; + const message = createBaseMsgSoftwareUpgrade(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + case 2: + message.plan = Plan.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSoftwareUpgrade { + const message = createBaseMsgSoftwareUpgrade(); + message.authority = object.authority ?? ""; + message.plan = object.plan !== undefined && object.plan !== null ? Plan.fromPartial(object.plan) : undefined; + return message; + } + +}; + +function createBaseMsgSoftwareUpgradeResponse(): MsgSoftwareUpgradeResponse { + return {}; +} + +export const MsgSoftwareUpgradeResponse = { + encode(_: MsgSoftwareUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSoftwareUpgradeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSoftwareUpgradeResponse { + const message = createBaseMsgSoftwareUpgradeResponse(); + return message; + } + +}; + +function createBaseMsgCancelUpgrade(): MsgCancelUpgrade { + return { + authority: "" + }; +} + +export const MsgCancelUpgrade = { + encode(message: MsgCancelUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCancelUpgrade(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCancelUpgrade { + const message = createBaseMsgCancelUpgrade(); + message.authority = object.authority ?? ""; + return message; + } + +}; + +function createBaseMsgCancelUpgradeResponse(): MsgCancelUpgradeResponse { + return {}; +} + +export const MsgCancelUpgradeResponse = { + encode(_: MsgCancelUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCancelUpgradeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCancelUpgradeResponse { + const message = createBaseMsgCancelUpgradeResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/upgrade/v1beta1/upgrade.ts b/packages/codegen/src/cosmos/upgrade/v1beta1/upgrade.ts new file mode 100644 index 00000000..5ca78d54 --- /dev/null +++ b/packages/codegen/src/cosmos/upgrade/v1beta1/upgrade.ts @@ -0,0 +1,398 @@ +import { Timestamp } from "../../../google/protobuf/timestamp"; +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Long, toTimestamp, fromTimestamp, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** Plan specifies information about a planned upgrade and when it should occur. */ + +export interface Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name: string; + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + */ + + /** @deprecated */ + + time?: Date; + /** + * The height at which the upgrade must be performed. + * Only used if Time is not set. + */ + + height: Long; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + + info: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + */ + + /** @deprecated */ + + upgradedClientState?: Any; +} +/** Plan specifies information about a planned upgrade and when it should occur. */ + +export interface PlanSDKType { + name: string; + /** @deprecated */ + + time?: Date; + height: Long; + info: string; + /** @deprecated */ + + upgraded_client_state?: AnySDKType; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ + +/** @deprecated */ + +export interface SoftwareUpgradeProposal { + title: string; + description: string; + plan?: Plan; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ + +/** @deprecated */ + +export interface SoftwareUpgradeProposalSDKType { + title: string; + description: string; + plan?: PlanSDKType; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. 
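MsgSoftwareUpgrade is what a gov proposal ultimately carries; fromPartial fills defaults and converts the nested Plan. A sketch with placeholder authority and info values:

import { MsgSoftwareUpgrade } from "./tx"; // illustrative path
import { Long } from "../../../helpers";

const msg = MsgSoftwareUpgrade.fromPartial({
  authority: "cosmos1...", // placeholder: the gov module account on the target chain
  plan: {
    name: "v3-upgrade",
    height: Long.fromValue(2_500_000),
    info: "https://example.com/upgrade-info.json" // placeholder upgrade-info location
  }
});
// The encoded bytes can be wrapped in an Any with type_url
// "/cosmos.upgrade.v1beta1.MsgSoftwareUpgrade" and included in a transaction.
const bytes = MsgSoftwareUpgrade.encode(msg).finish();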
+ */ + +/** @deprecated */ + +export interface CancelSoftwareUpgradeProposal { + title: string; + description: string; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ + +/** @deprecated */ + +export interface CancelSoftwareUpgradeProposalSDKType { + title: string; + description: string; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ + +export interface ModuleVersion { + /** name of the app module */ + name: string; + /** consensus version of the app module */ + + version: Long; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ + +export interface ModuleVersionSDKType { + name: string; + version: Long; +} + +function createBasePlan(): Plan { + return { + name: "", + time: undefined, + height: Long.ZERO, + info: "", + upgradedClientState: undefined + }; +} + +export const Plan = { + encode(message: Plan, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(18).fork()).ldelim(); + } + + if (!message.height.isZero()) { + writer.uint32(24).int64(message.height); + } + + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + + if (message.upgradedClientState !== undefined) { + Any.encode(message.upgradedClientState, writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Plan { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePlan(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 3: + message.height = (reader.int64() as Long); + break; + + case 4: + message.info = reader.string(); + break; + + case 5: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Plan { + const message = createBasePlan(); + message.name = object.name ?? ""; + message.time = object.time ?? undefined; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.info = object.info ?? ""; + message.upgradedClientState = object.upgradedClientState !== undefined && object.upgradedClientState !== null ? 
Any.fromPartial(object.upgradedClientState) : undefined; + return message; + } + +}; + +function createBaseSoftwareUpgradeProposal(): SoftwareUpgradeProposal { + return { + title: "", + description: "", + plan: undefined + }; +} + +export const SoftwareUpgradeProposal = { + encode(message: SoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSoftwareUpgradeProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SoftwareUpgradeProposal { + const message = createBaseSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.plan = object.plan !== undefined && object.plan !== null ? Plan.fromPartial(object.plan) : undefined; + return message; + } + +}; + +function createBaseCancelSoftwareUpgradeProposal(): CancelSoftwareUpgradeProposal { + return { + title: "", + description: "" + }; +} + +export const CancelSoftwareUpgradeProposal = { + encode(message: CancelSoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CancelSoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCancelSoftwareUpgradeProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CancelSoftwareUpgradeProposal { + const message = createBaseCancelSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + return message; + } + +}; + +function createBaseModuleVersion(): ModuleVersion { + return { + name: "", + version: Long.UZERO + }; +} + +export const ModuleVersion = { + encode(message: ModuleVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (!message.version.isZero()) { + writer.uint32(16).uint64(message.version); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleVersion { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleVersion(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.version = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ModuleVersion { + const message = createBaseModuleVersion(); + message.name = object.name ?? ""; + message.version = object.version !== undefined && object.version !== null ? Long.fromValue(object.version) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/vesting/v1beta1/tx.rpc.msg.ts b/packages/codegen/src/cosmos/vesting/v1beta1/tx.rpc.msg.ts new file mode 100644 index 00000000..f0728ea1 --- /dev/null +++ b/packages/codegen/src/cosmos/vesting/v1beta1/tx.rpc.msg.ts @@ -0,0 +1,53 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgCreateVestingAccount, MsgCreateVestingAccountResponse, MsgCreatePermanentLockedAccount, MsgCreatePermanentLockedAccountResponse, MsgCreatePeriodicVestingAccount, MsgCreatePeriodicVestingAccountResponse } from "./tx"; +/** Msg defines the bank Msg service. */ + +export interface Msg { + /** + * CreateVestingAccount defines a method that enables creating a vesting + * account. + */ + createVestingAccount(request: MsgCreateVestingAccount): Promise; + /** + * CreatePermanentLockedAccount defines a method that enables creating a permanent + * locked account. + */ + + createPermanentLockedAccount(request: MsgCreatePermanentLockedAccount): Promise; + /** + * CreatePeriodicVestingAccount defines a method that enables creating a + * periodic vesting account. 
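ModuleVersion stores the consensus version as an unsigned Long (Long.UZERO default), and zero values are skipped entirely when encoding. A small round-trip sketch; the module name is an example:

import { ModuleVersion } from "./upgrade"; // illustrative path
import { Long } from "../../../helpers";

const mv = ModuleVersion.fromPartial({ name: "bank", version: Long.fromValue(3) });
const decoded = ModuleVersion.decode(ModuleVersion.encode(mv).finish());
// decoded.version is a Long, so compare with .equals()/.toNumber() rather than ===.
console.log(decoded.name, decoded.version.toNumber()); // "bank" 3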
+ */ + + createPeriodicVestingAccount(request: MsgCreatePeriodicVestingAccount): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.createVestingAccount = this.createVestingAccount.bind(this); + this.createPermanentLockedAccount = this.createPermanentLockedAccount.bind(this); + this.createPeriodicVestingAccount = this.createPeriodicVestingAccount.bind(this); + } + + createVestingAccount(request: MsgCreateVestingAccount): Promise { + const data = MsgCreateVestingAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreateVestingAccount", data); + return promise.then(data => MsgCreateVestingAccountResponse.decode(new _m0.Reader(data))); + } + + createPermanentLockedAccount(request: MsgCreatePermanentLockedAccount): Promise { + const data = MsgCreatePermanentLockedAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreatePermanentLockedAccount", data); + return promise.then(data => MsgCreatePermanentLockedAccountResponse.decode(new _m0.Reader(data))); + } + + createPeriodicVestingAccount(request: MsgCreatePeriodicVestingAccount): Promise { + const data = MsgCreatePeriodicVestingAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreatePeriodicVestingAccount", data); + return promise.then(data => MsgCreatePeriodicVestingAccountResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmos/vesting/v1beta1/tx.ts b/packages/codegen/src/cosmos/vesting/v1beta1/tx.ts new file mode 100644 index 00000000..cad743c8 --- /dev/null +++ b/packages/codegen/src/cosmos/vesting/v1beta1/tx.ts @@ -0,0 +1,421 @@ +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Period, PeriodSDKType } from "./vesting"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ + +export interface MsgCreateVestingAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; + endTime: Long; + delayed: boolean; +} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ + +export interface MsgCreateVestingAccountSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; + end_time: Long; + delayed: boolean; +} +/** MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. */ + +export interface MsgCreateVestingAccountResponse {} +/** MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. */ + +export interface MsgCreateVestingAccountResponseSDKType {} +/** + * MsgCreatePermanentLockedAccount defines a message that enables creating a permanent + * locked account. + */ + +export interface MsgCreatePermanentLockedAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; +} +/** + * MsgCreatePermanentLockedAccount defines a message that enables creating a permanent + * locked account. + */ + +export interface MsgCreatePermanentLockedAccountSDKType { + from_address: string; + to_address: string; + amount: CoinSDKType[]; +} +/** MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. 
*/ + +export interface MsgCreatePermanentLockedAccountResponse {} +/** MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. */ + +export interface MsgCreatePermanentLockedAccountResponseSDKType {} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ + +export interface MsgCreatePeriodicVestingAccount { + fromAddress: string; + toAddress: string; + startTime: Long; + vestingPeriods: Period[]; +} +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ + +export interface MsgCreatePeriodicVestingAccountSDKType { + from_address: string; + to_address: string; + start_time: Long; + vesting_periods: PeriodSDKType[]; +} +/** + * MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount + * response type. + */ + +export interface MsgCreatePeriodicVestingAccountResponse {} +/** + * MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount + * response type. + */ + +export interface MsgCreatePeriodicVestingAccountResponseSDKType {} + +function createBaseMsgCreateVestingAccount(): MsgCreateVestingAccount { + return { + fromAddress: "", + toAddress: "", + amount: [], + endTime: Long.ZERO, + delayed: false + }; +} + +export const MsgCreateVestingAccount = { + encode(message: MsgCreateVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (!message.endTime.isZero()) { + writer.uint32(32).int64(message.endTime); + } + + if (message.delayed === true) { + writer.uint32(40).bool(message.delayed); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + + case 2: + message.toAddress = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + case 4: + message.endTime = (reader.int64() as Long); + break; + + case 5: + message.delayed = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateVestingAccount { + const message = createBaseMsgCreateVestingAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + message.endTime = object.endTime !== undefined && object.endTime !== null ? Long.fromValue(object.endTime) : Long.ZERO; + message.delayed = object.delayed ?? 
false; + return message; + } + +}; + +function createBaseMsgCreateVestingAccountResponse(): MsgCreateVestingAccountResponse { + return {}; +} + +export const MsgCreateVestingAccountResponse = { + encode(_: MsgCreateVestingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateVestingAccountResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreateVestingAccountResponse { + const message = createBaseMsgCreateVestingAccountResponse(); + return message; + } + +}; + +function createBaseMsgCreatePermanentLockedAccount(): MsgCreatePermanentLockedAccount { + return { + fromAddress: "", + toAddress: "", + amount: [] + }; +} + +export const MsgCreatePermanentLockedAccount = { + encode(message: MsgCreatePermanentLockedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreatePermanentLockedAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + + case 2: + message.toAddress = reader.string(); + break; + + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreatePermanentLockedAccount { + const message = createBaseMsgCreatePermanentLockedAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgCreatePermanentLockedAccountResponse(): MsgCreatePermanentLockedAccountResponse { + return {}; +} + +export const MsgCreatePermanentLockedAccountResponse = { + encode(_: MsgCreatePermanentLockedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreatePermanentLockedAccountResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreatePermanentLockedAccountResponse { + const message = createBaseMsgCreatePermanentLockedAccountResponse(); + return message; + } + +}; + +function createBaseMsgCreatePeriodicVestingAccount(): MsgCreatePeriodicVestingAccount { + return { + fromAddress: "", + toAddress: "", + startTime: Long.ZERO, + vestingPeriods: [] + }; +} + +export const MsgCreatePeriodicVestingAccount = { + encode(message: MsgCreatePeriodicVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + + if (!message.startTime.isZero()) { + writer.uint32(24).int64(message.startTime); + } + + for (const v of message.vestingPeriods) { + Period.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreatePeriodicVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + + case 2: + message.toAddress = reader.string(); + break; + + case 3: + message.startTime = (reader.int64() as Long); + break; + + case 4: + message.vestingPeriods.push(Period.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreatePeriodicVestingAccount { + const message = createBaseMsgCreatePeriodicVestingAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.startTime = object.startTime !== undefined && object.startTime !== null ? Long.fromValue(object.startTime) : Long.ZERO; + message.vestingPeriods = object.vestingPeriods?.map(e => Period.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgCreatePeriodicVestingAccountResponse(): MsgCreatePeriodicVestingAccountResponse { + return {}; +} + +export const MsgCreatePeriodicVestingAccountResponse = { + encode(_: MsgCreatePeriodicVestingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreatePeriodicVestingAccountResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreatePeriodicVestingAccountResponse { + const message = createBaseMsgCreatePeriodicVestingAccountResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos/vesting/v1beta1/vesting.ts b/packages/codegen/src/cosmos/vesting/v1beta1/vesting.ts new file mode 100644 index 00000000..bd9ae830 --- /dev/null +++ b/packages/codegen/src/cosmos/vesting/v1beta1/vesting.ts @@ -0,0 +1,468 @@ +import { BaseAccount, BaseAccountSDKType } from "../../auth/v1beta1/auth"; +import { Coin, CoinSDKType } from "../../base/v1beta1/coin"; +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * BaseVestingAccount implements the VestingAccount interface. It contains all + * the necessary fields needed for any vesting account implementation. + */ + +export interface BaseVestingAccount { + baseAccount?: BaseAccount; + originalVesting: Coin[]; + delegatedFree: Coin[]; + delegatedVesting: Coin[]; + endTime: Long; +} +/** + * BaseVestingAccount implements the VestingAccount interface. It contains all + * the necessary fields needed for any vesting account implementation. + */ + +export interface BaseVestingAccountSDKType { + base_account?: BaseAccountSDKType; + original_vesting: CoinSDKType[]; + delegated_free: CoinSDKType[]; + delegated_vesting: CoinSDKType[]; + end_time: Long; +} +/** + * ContinuousVestingAccount implements the VestingAccount interface. It + * continuously vests by unlocking coins linearly with respect to time. + */ + +export interface ContinuousVestingAccount { + baseVestingAccount?: BaseVestingAccount; + startTime: Long; +} +/** + * ContinuousVestingAccount implements the VestingAccount interface. It + * continuously vests by unlocking coins linearly with respect to time. + */ + +export interface ContinuousVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; + start_time: Long; +} +/** + * DelayedVestingAccount implements the VestingAccount interface. It vests all + * coins after a specific time, but non prior. In other words, it keeps them + * locked until a specified time. + */ + +export interface DelayedVestingAccount { + baseVestingAccount?: BaseVestingAccount; +} +/** + * DelayedVestingAccount implements the VestingAccount interface. It vests all + * coins after a specific time, but non prior. In other words, it keeps them + * locked until a specified time. + */ + +export interface DelayedVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; +} +/** Period defines a length of time and amount of coins that will vest. */ + +export interface Period { + length: Long; + amount: Coin[]; +} +/** Period defines a length of time and amount of coins that will vest. */ + +export interface PeriodSDKType { + length: Long; + amount: CoinSDKType[]; +} +/** + * PeriodicVestingAccount implements the VestingAccount interface. It + * periodically vests by unlocking coins during each specified period. + */ + +export interface PeriodicVestingAccount { + baseVestingAccount?: BaseVestingAccount; + startTime: Long; + vestingPeriods: Period[]; +} +/** + * PeriodicVestingAccount implements the VestingAccount interface. 
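MsgCreatePeriodicVestingAccount nests Coin and Period sub-messages, and fromPartial converts each entry recursively. A sketch with placeholder addresses and a single period; startTime and length are written here as unix seconds, which is the usual vesting-module convention:

import { MsgCreatePeriodicVestingAccount } from "./tx"; // illustrative path
import { Long } from "../../../helpers";

const msg = MsgCreatePeriodicVestingAccount.fromPartial({
  fromAddress: "cosmos1sender...",  // placeholder
  toAddress: "cosmos1recipient...", // placeholder
  startTime: Long.fromValue(1_700_000_000), // start of vesting (unix seconds)
  vestingPeriods: [
    // one-day period releasing 1 ATOM (denom/amount are example values)
    { length: Long.fromValue(86_400), amount: [{ denom: "uatom", amount: "1000000" }] }
  ]
});
const bytes = MsgCreatePeriodicVestingAccount.encode(msg).finish();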
It + * periodically vests by unlocking coins during each specified period. + */ + +export interface PeriodicVestingAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; + start_time: Long; + vesting_periods: PeriodSDKType[]; +} +/** + * PermanentLockedAccount implements the VestingAccount interface. It does + * not ever release coins, locking them indefinitely. Coins in this account can + * still be used for delegating and for governance votes even while locked. + * + * Since: cosmos-sdk 0.43 + */ + +export interface PermanentLockedAccount { + baseVestingAccount?: BaseVestingAccount; +} +/** + * PermanentLockedAccount implements the VestingAccount interface. It does + * not ever release coins, locking them indefinitely. Coins in this account can + * still be used for delegating and for governance votes even while locked. + * + * Since: cosmos-sdk 0.43 + */ + +export interface PermanentLockedAccountSDKType { + base_vesting_account?: BaseVestingAccountSDKType; +} + +function createBaseBaseVestingAccount(): BaseVestingAccount { + return { + baseAccount: undefined, + originalVesting: [], + delegatedFree: [], + delegatedVesting: [], + endTime: Long.ZERO + }; +} + +export const BaseVestingAccount = { + encode(message: BaseVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.originalVesting) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.delegatedFree) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.delegatedVesting) { + Coin.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (!message.endTime.isZero()) { + writer.uint32(40).int64(message.endTime); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBaseVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + + case 2: + message.originalVesting.push(Coin.decode(reader, reader.uint32())); + break; + + case 3: + message.delegatedFree.push(Coin.decode(reader, reader.uint32())); + break; + + case 4: + message.delegatedVesting.push(Coin.decode(reader, reader.uint32())); + break; + + case 5: + message.endTime = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BaseVestingAccount { + const message = createBaseBaseVestingAccount(); + message.baseAccount = object.baseAccount !== undefined && object.baseAccount !== null ? BaseAccount.fromPartial(object.baseAccount) : undefined; + message.originalVesting = object.originalVesting?.map(e => Coin.fromPartial(e)) || []; + message.delegatedFree = object.delegatedFree?.map(e => Coin.fromPartial(e)) || []; + message.delegatedVesting = object.delegatedVesting?.map(e => Coin.fromPartial(e)) || []; + message.endTime = object.endTime !== undefined && object.endTime !== null ? 
Long.fromValue(object.endTime) : Long.ZERO; + return message; + } + +}; + +function createBaseContinuousVestingAccount(): ContinuousVestingAccount { + return { + baseVestingAccount: undefined, + startTime: Long.ZERO + }; +} + +export const ContinuousVestingAccount = { + encode(message: ContinuousVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + + if (!message.startTime.isZero()) { + writer.uint32(16).int64(message.startTime); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContinuousVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContinuousVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + + case 2: + message.startTime = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContinuousVestingAccount { + const message = createBaseContinuousVestingAccount(); + message.baseVestingAccount = object.baseVestingAccount !== undefined && object.baseVestingAccount !== null ? BaseVestingAccount.fromPartial(object.baseVestingAccount) : undefined; + message.startTime = object.startTime !== undefined && object.startTime !== null ? Long.fromValue(object.startTime) : Long.ZERO; + return message; + } + +}; + +function createBaseDelayedVestingAccount(): DelayedVestingAccount { + return { + baseVestingAccount: undefined + }; +} + +export const DelayedVestingAccount = { + encode(message: DelayedVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelayedVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelayedVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DelayedVestingAccount { + const message = createBaseDelayedVestingAccount(); + message.baseVestingAccount = object.baseVestingAccount !== undefined && object.baseVestingAccount !== null ? BaseVestingAccount.fromPartial(object.baseVestingAccount) : undefined; + return message; + } + +}; + +function createBasePeriod(): Period { + return { + length: Long.ZERO, + amount: [] + }; +} + +export const Period = { + encode(message: Period, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.length.isZero()) { + writer.uint32(8).int64(message.length); + } + + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Period { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeriod(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.length = (reader.int64() as Long); + break; + + case 2: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Period { + const message = createBasePeriod(); + message.length = object.length !== undefined && object.length !== null ? Long.fromValue(object.length) : Long.ZERO; + message.amount = object.amount?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBasePeriodicVestingAccount(): PeriodicVestingAccount { + return { + baseVestingAccount: undefined, + startTime: Long.ZERO, + vestingPeriods: [] + }; +} + +export const PeriodicVestingAccount = { + encode(message: PeriodicVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + + if (!message.startTime.isZero()) { + writer.uint32(16).int64(message.startTime); + } + + for (const v of message.vestingPeriods) { + Period.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeriodicVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeriodicVestingAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + + case 2: + message.startTime = (reader.int64() as Long); + break; + + case 3: + message.vestingPeriods.push(Period.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PeriodicVestingAccount { + const message = createBasePeriodicVestingAccount(); + message.baseVestingAccount = object.baseVestingAccount !== undefined && object.baseVestingAccount !== null ? BaseVestingAccount.fromPartial(object.baseVestingAccount) : undefined; + message.startTime = object.startTime !== undefined && object.startTime !== null ? Long.fromValue(object.startTime) : Long.ZERO; + message.vestingPeriods = object.vestingPeriods?.map(e => Period.fromPartial(e)) || []; + return message; + } + +}; + +function createBasePermanentLockedAccount(): PermanentLockedAccount { + return { + baseVestingAccount: undefined + }; +} + +export const PermanentLockedAccount = { + encode(message: PermanentLockedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PermanentLockedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePermanentLockedAccount(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PermanentLockedAccount { + const message = createBasePermanentLockedAccount(); + message.baseVestingAccount = object.baseVestingAccount !== undefined && object.baseVestingAccount !== null ? BaseVestingAccount.fromPartial(object.baseVestingAccount) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos_proto/bundle.ts b/packages/codegen/src/cosmos_proto/bundle.ts new file mode 100644 index 00000000..3dcd4c86 --- /dev/null +++ b/packages/codegen/src/cosmos_proto/bundle.ts @@ -0,0 +1,3 @@ +import * as _2 from "./cosmos"; +export const cosmos_proto = { ..._2 +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmos_proto/cosmos.ts b/packages/codegen/src/cosmos_proto/cosmos.ts new file mode 100644 index 00000000..5999944b --- /dev/null +++ b/packages/codegen/src/cosmos_proto/cosmos.ts @@ -0,0 +1,255 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../helpers"; +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} +export const ScalarTypeSDKType = ScalarType; +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ + +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + + description: string; +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ + +export interface InterfaceDescriptorSDKType { + name: string; + description: string; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. 
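The vesting account types above all wrap a shared BaseVestingAccount, so nested fromPartial calls are enough to build one. A sketch of a delayed account that unlocks everything at endTime; denom, amount, and the timestamp are placeholders:

import { DelayedVestingAccount } from "./vesting"; // illustrative path
import { Long } from "../../../helpers";

const delayed = DelayedVestingAccount.fromPartial({
  baseVestingAccount: {
    originalVesting: [{ denom: "uatom", amount: "5000000" }], // example balance
    endTime: Long.fromValue(1_750_000_000) // unlock time (unix seconds)
  }
});
const bytes = DelayedVestingAccount.encode(delayed).finish();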
+ * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ + +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + + fieldType: ScalarType[]; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ + +export interface ScalarDescriptorSDKType { + name: string; + description: string; + field_type: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { + name: "", + description: "" + }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + return message; + } + +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { + name: "", + description: "", + fieldType: [] + }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + writer.uint32(26).fork(); + + for (const v of message.fieldType) { + writer.int32(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.fieldType.push((reader.int32() as any)); + } + } else { + message.fieldType.push((reader.int32() as any)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/bundle.ts b/packages/codegen/src/cosmwasm/bundle.ts new file mode 100644 index 00000000..f090e4ab --- /dev/null +++ b/packages/codegen/src/cosmwasm/bundle.ts @@ -0,0 +1,32 @@ +import * as _95 from "./wasm/v1/authz"; +import * as _96 from "./wasm/v1/genesis"; +import * as _97 from "./wasm/v1/ibc"; +import * as _98 from "./wasm/v1/proposal"; +import * as _99 from "./wasm/v1/query"; +import * as _100 from "./wasm/v1/tx"; +import * as _101 from "./wasm/v1/types"; +import * as _194 from "./wasm/v1/query.lcd"; +import * as _195 from "./wasm/v1/query.rpc.Query"; +import * as _196 from "./wasm/v1/tx.rpc.msg"; +import * as _213 from "./lcd"; +import * as _214 from "./rpc.query"; +import * as _215 from "./rpc.tx"; +export namespace cosmwasm { + export namespace wasm { + export const v1 = { ..._95, + ..._96, + ..._97, + ..._98, + ..._99, + ..._100, + ..._101, + ..._194, + ..._195, + ..._196 + }; + } + export const ClientFactory = { ..._213, + ..._214, + ..._215 + }; +} \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/lcd.ts b/packages/codegen/src/cosmwasm/lcd.ts new file mode 100644 index 00000000..ce16f358 --- /dev/null +++ b/packages/codegen/src/cosmwasm/lcd.ts @@ -0,0 +1,106 @@ +import { LCDClient } from "@osmonauts/lcd"; +export const createLCDClient = async ({ + restEndpoint +}: { + restEndpoint: string; +}) => { + const requestClient = new LCDClient({ + restEndpoint + }); + return { + cosmos: { + auth: { + v1beta1: new (await import("../cosmos/auth/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + authz: { + v1beta1: new (await import("../cosmos/authz/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + bank: { + v1beta1: new (await import("../cosmos/bank/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + base: { + tendermint: { + v1beta1: new (await import("../cosmos/base/tendermint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + distribution: { + v1beta1: new (await import("../cosmos/distribution/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + evidence: { + v1beta1: new (await import("../cosmos/evidence/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + feegrant: { + v1beta1: new (await import("../cosmos/feegrant/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + gov: { + v1: new (await import("../cosmos/gov/v1/query.lcd")).LCDQueryClient({ + requestClient + }), + v1beta1: new (await import("../cosmos/gov/v1beta1/query.lcd")).LCDQueryClient({ + 
requestClient + }) + }, + group: { + v1: new (await import("../cosmos/group/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + mint: { + v1beta1: new (await import("../cosmos/mint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + nft: { + v1beta1: new (await import("../cosmos/nft/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + params: { + v1beta1: new (await import("../cosmos/params/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + slashing: { + v1beta1: new (await import("../cosmos/slashing/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + staking: { + v1beta1: new (await import("../cosmos/staking/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + tx: { + v1beta1: new (await import("../cosmos/tx/v1beta1/service.lcd")).LCDQueryClient({ + requestClient + }) + }, + upgrade: { + v1beta1: new (await import("../cosmos/upgrade/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + cosmwasm: { + wasm: { + v1: new (await import("./wasm/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/rpc.query.ts b/packages/codegen/src/cosmwasm/rpc.query.ts new file mode 100644 index 00000000..8fa98029 --- /dev/null +++ b/packages/codegen/src/cosmwasm/rpc.query.ts @@ -0,0 +1,73 @@ +import { Tendermint34Client, HttpEndpoint } from "@cosmjs/tendermint-rpc"; +import { QueryClient } from "@cosmjs/stargate"; +export const createRPCQueryClient = async ({ + rpcEndpoint +}: { + rpcEndpoint: string | HttpEndpoint; +}) => { + const tmClient = await Tendermint34Client.connect(rpcEndpoint); + const client = new QueryClient(tmClient); + return { + cosmos: { + app: { + v1alpha1: (await import("../cosmos/app/v1alpha1/query.rpc.Query")).createRpcQueryExtension(client) + }, + auth: { + v1beta1: (await import("../cosmos/auth/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + authz: { + v1beta1: (await import("../cosmos/authz/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + bank: { + v1beta1: (await import("../cosmos/bank/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + base: { + tendermint: { + v1beta1: (await import("../cosmos/base/tendermint/v1beta1/query.rpc.Service")).createRpcQueryExtension(client) + } + }, + distribution: { + v1beta1: (await import("../cosmos/distribution/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + evidence: { + v1beta1: (await import("../cosmos/evidence/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + feegrant: { + v1beta1: (await import("../cosmos/feegrant/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + gov: { + v1: (await import("../cosmos/gov/v1/query.rpc.Query")).createRpcQueryExtension(client), + v1beta1: (await import("../cosmos/gov/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + group: { + v1: (await import("../cosmos/group/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + mint: { + v1beta1: (await import("../cosmos/mint/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + nft: { + v1beta1: (await import("../cosmos/nft/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + params: { + v1beta1: (await import("../cosmos/params/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + slashing: { + v1beta1: (await import("../cosmos/slashing/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + staking: { + v1beta1: (await 
import("../cosmos/staking/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + tx: { + v1beta1: (await import("../cosmos/tx/v1beta1/service.rpc.Service")).createRpcQueryExtension(client) + }, + upgrade: { + v1beta1: (await import("../cosmos/upgrade/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + } + }, + cosmwasm: { + wasm: { + v1: (await import("./wasm/v1/query.rpc.Query")).createRpcQueryExtension(client) + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/rpc.tx.ts b/packages/codegen/src/cosmwasm/rpc.tx.ts new file mode 100644 index 00000000..e0ff07d4 --- /dev/null +++ b/packages/codegen/src/cosmwasm/rpc.tx.ts @@ -0,0 +1,54 @@ +import { Rpc } from "../helpers"; +export const createRPCMsgClient = async ({ + rpc +}: { + rpc: Rpc; +}) => ({ + cosmos: { + authz: { + v1beta1: new (await import("../cosmos/authz/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + bank: { + v1beta1: new (await import("../cosmos/bank/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + crisis: { + v1beta1: new (await import("../cosmos/crisis/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + distribution: { + v1beta1: new (await import("../cosmos/distribution/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + evidence: { + v1beta1: new (await import("../cosmos/evidence/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + feegrant: { + v1beta1: new (await import("../cosmos/feegrant/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + gov: { + v1: new (await import("../cosmos/gov/v1/tx.rpc.msg")).MsgClientImpl(rpc), + v1beta1: new (await import("../cosmos/gov/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + group: { + v1: new (await import("../cosmos/group/v1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + nft: { + v1beta1: new (await import("../cosmos/nft/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + slashing: { + v1beta1: new (await import("../cosmos/slashing/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + staking: { + v1beta1: new (await import("../cosmos/staking/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + upgrade: { + v1beta1: new (await import("../cosmos/upgrade/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + vesting: { + v1beta1: new (await import("../cosmos/vesting/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + } + }, + cosmwasm: { + wasm: { + v1: new (await import("./wasm/v1/tx.rpc.msg")).MsgClientImpl(rpc) + } + } +}); \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/authz.ts b/packages/codegen/src/cosmwasm/wasm/v1/authz.ts new file mode 100644 index 00000000..b45419ea --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/authz.ts @@ -0,0 +1,603 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * ContractExecutionAuthorization defines authorization for wasm execute. + * Since: wasmd 0.30 + */ + +export interface ContractExecutionAuthorization { + /** Grants for contract executions */ + grants: ContractGrant[]; +} +/** + * ContractExecutionAuthorization defines authorization for wasm execute. + * Since: wasmd 0.30 + */ + +export interface ContractExecutionAuthorizationSDKType { + grants: ContractGrantSDKType[]; +} +/** + * ContractMigrationAuthorization defines authorization for wasm contract + * migration. 
Since: wasmd 0.30 + */ + +export interface ContractMigrationAuthorization { + /** Grants for contract migrations */ + grants: ContractGrant[]; +} +/** + * ContractMigrationAuthorization defines authorization for wasm contract + * migration. Since: wasmd 0.30 + */ + +export interface ContractMigrationAuthorizationSDKType { + grants: ContractGrantSDKType[]; +} +/** + * ContractGrant a granted permission for a single contract + * Since: wasmd 0.30 + */ + +export interface ContractGrant { + /** Contract is the bech32 address of the smart contract */ + contract: string; + /** + * Limit defines execution limits that are enforced and updated when the grant + * is applied. When the limit lapsed the grant is removed. + */ + + limit?: Any; + /** + * Filter define more fine-grained control on the message payload passed + * to the contract in the operation. When no filter applies on execution, the + * operation is prohibited. + */ + + filter?: Any; +} +/** + * ContractGrant a granted permission for a single contract + * Since: wasmd 0.30 + */ + +export interface ContractGrantSDKType { + contract: string; + limit?: AnySDKType; + filter?: AnySDKType; +} +/** + * MaxCallsLimit limited number of calls to the contract. No funds transferable. + * Since: wasmd 0.30 + */ + +export interface MaxCallsLimit { + /** Remaining number that is decremented on each execution */ + remaining: Long; +} +/** + * MaxCallsLimit limited number of calls to the contract. No funds transferable. + * Since: wasmd 0.30 + */ + +export interface MaxCallsLimitSDKType { + remaining: Long; +} +/** + * MaxFundsLimit defines the maximal amounts that can be sent to the contract. + * Since: wasmd 0.30 + */ + +export interface MaxFundsLimit { + /** Amounts is the maximal amount of tokens transferable to the contract. */ + amounts: Coin[]; +} +/** + * MaxFundsLimit defines the maximal amounts that can be sent to the contract. + * Since: wasmd 0.30 + */ + +export interface MaxFundsLimitSDKType { + amounts: CoinSDKType[]; +} +/** + * CombinedLimit defines the maximal amounts that can be sent to a contract and + * the maximal number of calls executable. Both need to remain >0 to be valid. + * Since: wasmd 0.30 + */ + +export interface CombinedLimit { + /** Remaining number that is decremented on each execution */ + callsRemaining: Long; + /** Amounts is the maximal amount of tokens transferable to the contract. */ + + amounts: Coin[]; +} +/** + * CombinedLimit defines the maximal amounts that can be sent to a contract and + * the maximal number of calls executable. Both need to remain >0 to be valid. + * Since: wasmd 0.30 + */ + +export interface CombinedLimitSDKType { + calls_remaining: Long; + amounts: CoinSDKType[]; +} +/** + * AllowAllMessagesFilter is a wildcard to allow any type of contract payload + * message. + * Since: wasmd 0.30 + */ + +export interface AllowAllMessagesFilter {} +/** + * AllowAllMessagesFilter is a wildcard to allow any type of contract payload + * message. + * Since: wasmd 0.30 + */ + +export interface AllowAllMessagesFilterSDKType {} +/** + * AcceptedMessageKeysFilter accept only the specific contract message keys in + * the json object to be executed. + * Since: wasmd 0.30 + */ + +export interface AcceptedMessageKeysFilter { + /** Messages is the list of unique keys */ + keys: string[]; +} +/** + * AcceptedMessageKeysFilter accept only the specific contract message keys in + * the json object to be executed. 
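// A sketch of building the authz types above. The Any fields (typeUrl/value)
// and the type URL string are assumptions based on the usual protobuf Any
// shape; the contract address is a placeholder.
import { Any } from "../../../google/protobuf/any";
import { Long } from "../../../helpers";
import { ContractExecutionAuthorization, MaxCallsLimit } from "./authz";

const limit = Any.fromPartial({
  typeUrl: "/cosmwasm.wasm.v1.MaxCallsLimit",             // assumed canonical type URL
  value: MaxCallsLimit.encode(
    MaxCallsLimit.fromPartial({ remaining: Long.fromNumber(10, true) })
  ).finish(),
});

const authorization = ContractExecutionAuthorization.fromPartial({
  grants: [{ contract: "wasm1...", limit }],              // placeholder bech32 address
});
const encoded = ContractExecutionAuthorization.encode(authorization).finish();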
+ * Since: wasmd 0.30 + */ + +export interface AcceptedMessageKeysFilterSDKType { + keys: string[]; +} +/** + * AcceptedMessagesFilter accept only the specific raw contract messages to be + * executed. + * Since: wasmd 0.30 + */ + +export interface AcceptedMessagesFilter { + /** Messages is the list of raw contract messages */ + messages: Uint8Array[]; +} +/** + * AcceptedMessagesFilter accept only the specific raw contract messages to be + * executed. + * Since: wasmd 0.30 + */ + +export interface AcceptedMessagesFilterSDKType { + messages: Uint8Array[]; +} + +function createBaseContractExecutionAuthorization(): ContractExecutionAuthorization { + return { + grants: [] + }; +} + +export const ContractExecutionAuthorization = { + encode(message: ContractExecutionAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + ContractGrant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContractExecutionAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContractExecutionAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grants.push(ContractGrant.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContractExecutionAuthorization { + const message = createBaseContractExecutionAuthorization(); + message.grants = object.grants?.map(e => ContractGrant.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseContractMigrationAuthorization(): ContractMigrationAuthorization { + return { + grants: [] + }; +} + +export const ContractMigrationAuthorization = { + encode(message: ContractMigrationAuthorization, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.grants) { + ContractGrant.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContractMigrationAuthorization { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseContractMigrationAuthorization(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.grants.push(ContractGrant.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContractMigrationAuthorization { + const message = createBaseContractMigrationAuthorization(); + message.grants = object.grants?.map(e => ContractGrant.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseContractGrant(): ContractGrant { + return { + contract: "", + limit: undefined, + filter: undefined + }; +} + +export const ContractGrant = { + encode(message: ContractGrant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.contract !== "") { + writer.uint32(10).string(message.contract); + } + + if (message.limit !== undefined) { + Any.encode(message.limit, writer.uint32(18).fork()).ldelim(); + } + + if (message.filter !== undefined) { + Any.encode(message.filter, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContractGrant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContractGrant(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.contract = reader.string(); + break; + + case 2: + message.limit = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.filter = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContractGrant { + const message = createBaseContractGrant(); + message.contract = object.contract ?? ""; + message.limit = object.limit !== undefined && object.limit !== null ? Any.fromPartial(object.limit) : undefined; + message.filter = object.filter !== undefined && object.filter !== null ? Any.fromPartial(object.filter) : undefined; + return message; + } + +}; + +function createBaseMaxCallsLimit(): MaxCallsLimit { + return { + remaining: Long.UZERO + }; +} + +export const MaxCallsLimit = { + encode(message: MaxCallsLimit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.remaining.isZero()) { + writer.uint32(8).uint64(message.remaining); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MaxCallsLimit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMaxCallsLimit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.remaining = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MaxCallsLimit { + const message = createBaseMaxCallsLimit(); + message.remaining = object.remaining !== undefined && object.remaining !== null ? 
Long.fromValue(object.remaining) : Long.UZERO; + return message; + } + +}; + +function createBaseMaxFundsLimit(): MaxFundsLimit { + return { + amounts: [] + }; +} + +export const MaxFundsLimit = { + encode(message: MaxFundsLimit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amounts) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MaxFundsLimit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMaxFundsLimit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.amounts.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MaxFundsLimit { + const message = createBaseMaxFundsLimit(); + message.amounts = object.amounts?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCombinedLimit(): CombinedLimit { + return { + callsRemaining: Long.UZERO, + amounts: [] + }; +} + +export const CombinedLimit = { + encode(message: CombinedLimit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.callsRemaining.isZero()) { + writer.uint32(8).uint64(message.callsRemaining); + } + + for (const v of message.amounts) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CombinedLimit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCombinedLimit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.callsRemaining = (reader.uint64() as Long); + break; + + case 2: + message.amounts.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CombinedLimit { + const message = createBaseCombinedLimit(); + message.callsRemaining = object.callsRemaining !== undefined && object.callsRemaining !== null ? Long.fromValue(object.callsRemaining) : Long.UZERO; + message.amounts = object.amounts?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseAllowAllMessagesFilter(): AllowAllMessagesFilter { + return {}; +} + +export const AllowAllMessagesFilter = { + encode(_: AllowAllMessagesFilter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AllowAllMessagesFilter { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
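// A small sketch of CombinedLimit above: both the remaining call count and the
// spendable amounts must stay > 0 for the grant to remain valid. The denom and
// amount are placeholders.
import { Long } from "../../../helpers";
import { CombinedLimit } from "./authz";

const combined = CombinedLimit.fromPartial({
  callsRemaining: Long.fromNumber(5, true),
  amounts: [{ denom: "ustake", amount: "1000000" }],      // Coin.fromPartial is applied per element
});
const combinedBytes = CombinedLimit.encode(combined).finish();
const combinedBack = CombinedLimit.decode(combinedBytes); // combinedBack.callsRemaining.toNumber() === 5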
reader.len : reader.pos + length; + const message = createBaseAllowAllMessagesFilter(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): AllowAllMessagesFilter { + const message = createBaseAllowAllMessagesFilter(); + return message; + } + +}; + +function createBaseAcceptedMessageKeysFilter(): AcceptedMessageKeysFilter { + return { + keys: [] + }; +} + +export const AcceptedMessageKeysFilter = { + encode(message: AcceptedMessageKeysFilter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.keys) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AcceptedMessageKeysFilter { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAcceptedMessageKeysFilter(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.keys.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AcceptedMessageKeysFilter { + const message = createBaseAcceptedMessageKeysFilter(); + message.keys = object.keys?.map(e => e) || []; + return message; + } + +}; + +function createBaseAcceptedMessagesFilter(): AcceptedMessagesFilter { + return { + messages: [] + }; +} + +export const AcceptedMessagesFilter = { + encode(message: AcceptedMessagesFilter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.messages) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AcceptedMessagesFilter { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAcceptedMessagesFilter(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.messages.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AcceptedMessagesFilter { + const message = createBaseAcceptedMessagesFilter(); + message.messages = object.messages?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/genesis.ts b/packages/codegen/src/cosmwasm/wasm/v1/genesis.ts new file mode 100644 index 00000000..7bb16707 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/genesis.ts @@ -0,0 +1,345 @@ +import { Params, ParamsSDKType, CodeInfo, CodeInfoSDKType, ContractInfo, ContractInfoSDKType, Model, ModelSDKType, ContractCodeHistoryEntry, ContractCodeHistoryEntrySDKType } from "./types"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** GenesisState - genesis state of x/wasm */ + +export interface GenesisState { + params?: Params; + codes: Code[]; + contracts: Contract[]; + sequences: Sequence[]; +} +/** GenesisState - genesis state of x/wasm */ + +export interface GenesisStateSDKType { + params?: ParamsSDKType; + codes: CodeSDKType[]; + contracts: ContractSDKType[]; + sequences: SequenceSDKType[]; +} +/** Code struct encompasses CodeInfo and CodeBytes */ + +export interface Code { + codeId: Long; + codeInfo?: CodeInfo; + codeBytes: Uint8Array; + /** Pinned to wasmvm cache */ + + pinned: boolean; +} +/** Code struct encompasses CodeInfo and CodeBytes */ + +export interface CodeSDKType { + code_id: Long; + code_info?: CodeInfoSDKType; + code_bytes: Uint8Array; + pinned: boolean; +} +/** Contract struct encompasses ContractAddress, ContractInfo, and ContractState */ + +export interface Contract { + contractAddress: string; + contractInfo?: ContractInfo; + contractState: Model[]; + contractCodeHistory: ContractCodeHistoryEntry[]; +} +/** Contract struct encompasses ContractAddress, ContractInfo, and ContractState */ + +export interface ContractSDKType { + contract_address: string; + contract_info?: ContractInfoSDKType; + contract_state: ModelSDKType[]; + contract_code_history: ContractCodeHistoryEntrySDKType[]; +} +/** Sequence key and value of an id generation counter */ + +export interface Sequence { + idKey: Uint8Array; + value: Long; +} +/** Sequence key and value of an id generation counter */ + +export interface SequenceSDKType { + id_key: Uint8Array; + value: Long; +} + +function createBaseGenesisState(): GenesisState { + return { + params: undefined, + codes: [], + contracts: [], + sequences: [] + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.codes) { + Code.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.contracts) { + Contract.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.sequences) { + Sequence.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
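// A hedged sketch of the genesis Code record above; codeBytes would normally be
// the raw or gzip-compressed wasm blob, shortened here to a placeholder.
import { Long } from "../../../helpers";
import { Code } from "./genesis";

const code = Code.fromPartial({
  codeId: Long.fromNumber(1, true),
  codeBytes: new Uint8Array([0x00, 0x61, 0x73, 0x6d]),    // only the "\0asm" magic bytes as a stand-in
  pinned: true,                                           // pinned to the wasmvm cache
});
const codeRoundTripped = Code.decode(Code.encode(code).finish());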
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 2: + message.codes.push(Code.decode(reader, reader.uint32())); + break; + + case 3: + message.contracts.push(Contract.decode(reader, reader.uint32())); + break; + + case 4: + message.sequences.push(Sequence.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.codes = object.codes?.map(e => Code.fromPartial(e)) || []; + message.contracts = object.contracts?.map(e => Contract.fromPartial(e)) || []; + message.sequences = object.sequences?.map(e => Sequence.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCode(): Code { + return { + codeId: Long.UZERO, + codeInfo: undefined, + codeBytes: new Uint8Array(), + pinned: false + }; +} + +export const Code = { + encode(message: Code, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.codeInfo !== undefined) { + CodeInfo.encode(message.codeInfo, writer.uint32(18).fork()).ldelim(); + } + + if (message.codeBytes.length !== 0) { + writer.uint32(26).bytes(message.codeBytes); + } + + if (message.pinned === true) { + writer.uint32(32).bool(message.pinned); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Code { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCode(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.codeInfo = CodeInfo.decode(reader, reader.uint32()); + break; + + case 3: + message.codeBytes = reader.bytes(); + break; + + case 4: + message.pinned = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Code { + const message = createBaseCode(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.codeInfo = object.codeInfo !== undefined && object.codeInfo !== null ? CodeInfo.fromPartial(object.codeInfo) : undefined; + message.codeBytes = object.codeBytes ?? new Uint8Array(); + message.pinned = object.pinned ?? 
false; + return message; + } + +}; + +function createBaseContract(): Contract { + return { + contractAddress: "", + contractInfo: undefined, + contractState: [], + contractCodeHistory: [] + }; +} + +export const Contract = { + encode(message: Contract, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.contractAddress !== "") { + writer.uint32(10).string(message.contractAddress); + } + + if (message.contractInfo !== undefined) { + ContractInfo.encode(message.contractInfo, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.contractState) { + Model.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.contractCodeHistory) { + ContractCodeHistoryEntry.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Contract { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContract(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.contractAddress = reader.string(); + break; + + case 2: + message.contractInfo = ContractInfo.decode(reader, reader.uint32()); + break; + + case 3: + message.contractState.push(Model.decode(reader, reader.uint32())); + break; + + case 4: + message.contractCodeHistory.push(ContractCodeHistoryEntry.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Contract { + const message = createBaseContract(); + message.contractAddress = object.contractAddress ?? ""; + message.contractInfo = object.contractInfo !== undefined && object.contractInfo !== null ? ContractInfo.fromPartial(object.contractInfo) : undefined; + message.contractState = object.contractState?.map(e => Model.fromPartial(e)) || []; + message.contractCodeHistory = object.contractCodeHistory?.map(e => ContractCodeHistoryEntry.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSequence(): Sequence { + return { + idKey: new Uint8Array(), + value: Long.UZERO + }; +} + +export const Sequence = { + encode(message: Sequence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.idKey.length !== 0) { + writer.uint32(10).bytes(message.idKey); + } + + if (!message.value.isZero()) { + writer.uint32(16).uint64(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Sequence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSequence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.idKey = reader.bytes(); + break; + + case 2: + message.value = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Sequence { + const message = createBaseSequence(); + message.idKey = object.idKey ?? new Uint8Array(); + message.value = object.value !== undefined && object.value !== null ? 
Long.fromValue(object.value) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/ibc.ts b/packages/codegen/src/cosmwasm/wasm/v1/ibc.ts new file mode 100644 index 00000000..8eb06381 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/ibc.ts @@ -0,0 +1,164 @@ +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** MsgIBCSend */ + +export interface MsgIBCSend { + /** the channel by which the packet will be sent */ + channel: string; + /** + * Timeout height relative to the current block height. + * The timeout is disabled when set to 0. + */ + + timeoutHeight: Long; + /** + * Timeout timestamp (in nanoseconds) relative to the current block timestamp. + * The timeout is disabled when set to 0. + */ + + timeoutTimestamp: Long; + /** + * Data is the payload to transfer. We must not make assumption what format or + * content is in here. + */ + + data: Uint8Array; +} +/** MsgIBCSend */ + +export interface MsgIBCSendSDKType { + channel: string; + timeout_height: Long; + timeout_timestamp: Long; + data: Uint8Array; +} +/** MsgIBCCloseChannel port and channel need to be owned by the contract */ + +export interface MsgIBCCloseChannel { + channel: string; +} +/** MsgIBCCloseChannel port and channel need to be owned by the contract */ + +export interface MsgIBCCloseChannelSDKType { + channel: string; +} + +function createBaseMsgIBCSend(): MsgIBCSend { + return { + channel: "", + timeoutHeight: Long.UZERO, + timeoutTimestamp: Long.UZERO, + data: new Uint8Array() + }; +} + +export const MsgIBCSend = { + encode(message: MsgIBCSend, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.channel !== "") { + writer.uint32(18).string(message.channel); + } + + if (!message.timeoutHeight.isZero()) { + writer.uint32(32).uint64(message.timeoutHeight); + } + + if (!message.timeoutTimestamp.isZero()) { + writer.uint32(40).uint64(message.timeoutTimestamp); + } + + if (message.data.length !== 0) { + writer.uint32(50).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgIBCSend { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgIBCSend(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.channel = reader.string(); + break; + + case 4: + message.timeoutHeight = (reader.uint64() as Long); + break; + + case 5: + message.timeoutTimestamp = (reader.uint64() as Long); + break; + + case 6: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgIBCSend { + const message = createBaseMsgIBCSend(); + message.channel = object.channel ?? ""; + message.timeoutHeight = object.timeoutHeight !== undefined && object.timeoutHeight !== null ? Long.fromValue(object.timeoutHeight) : Long.UZERO; + message.timeoutTimestamp = object.timeoutTimestamp !== undefined && object.timeoutTimestamp !== null ? Long.fromValue(object.timeoutTimestamp) : Long.UZERO; + message.data = object.data ?? 
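// A sketch of the IBC send message above. Timeouts are relative to the current
// block and disabled when zero; the channel id and payload are placeholders.
import { Long } from "../../../helpers";
import { MsgIBCSend } from "./ibc";

const packet = MsgIBCSend.fromPartial({
  channel: "channel-0",                                      // placeholder channel id
  timeoutHeight: Long.UZERO,                                 // 0 = no height timeout
  timeoutTimestamp: Long.fromNumber(600_000_000_000, true),  // ~10 minutes in nanoseconds, relative
  data: new TextEncoder().encode('{"do_something":{}}'),     // arbitrary payload bytes
});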
new Uint8Array(); + return message; + } + +}; + +function createBaseMsgIBCCloseChannel(): MsgIBCCloseChannel { + return { + channel: "" + }; +} + +export const MsgIBCCloseChannel = { + encode(message: MsgIBCCloseChannel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.channel !== "") { + writer.uint32(18).string(message.channel); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgIBCCloseChannel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgIBCCloseChannel(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.channel = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgIBCCloseChannel { + const message = createBaseMsgIBCCloseChannel(); + message.channel = object.channel ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/proposal.ts b/packages/codegen/src/cosmwasm/wasm/v1/proposal.ts new file mode 100644 index 00000000..01d40f28 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/proposal.ts @@ -0,0 +1,1668 @@ +import { AccessConfig, AccessConfigSDKType } from "./types"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** StoreCodeProposal gov proposal content type to submit WASM code to the system */ + +export interface StoreCodeProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + + runAs: string; + /** WASMByteCode can be raw or gzip compressed */ + + wasmByteCode: Uint8Array; + /** InstantiatePermission to apply on contract creation, optional */ + + instantiatePermission?: AccessConfig; + /** UnpinCode code on upload, optional */ + + unpinCode: boolean; + /** Source is the URL where the code is hosted */ + + source: string; + /** + * Builder is the docker image used to build the code deterministically, used + * for smart contract verification + */ + + builder: string; + /** + * CodeHash is the SHA256 sum of the code outputted by builder, used for smart + * contract verification + */ + + codeHash: Uint8Array; +} +/** StoreCodeProposal gov proposal content type to submit WASM code to the system */ + +export interface StoreCodeProposalSDKType { + title: string; + description: string; + run_as: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; + unpin_code: boolean; + source: string; + builder: string; + code_hash: Uint8Array; +} +/** + * InstantiateContractProposal gov proposal content type to instantiate a + * contract. + */ + +export interface InstantiateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + + runAs: string; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** CodeID is the reference to the stored WASM code */ + + codeId: Long; + /** Label is optional metadata to be stored with a constract instance. 
*/ + + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; +} +/** + * InstantiateContractProposal gov proposal content type to instantiate a + * contract. + */ + +export interface InstantiateContractProposalSDKType { + title: string; + description: string; + run_as: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** + * InstantiateContract2Proposal gov proposal content type to instantiate + * contract 2 + */ + +export interface InstantiateContract2Proposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** RunAs is the address that is passed to the contract's enviroment as sender */ + + runAs: string; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** CodeID is the reference to the stored WASM code */ + + codeId: Long; + /** Label is optional metadata to be stored with a constract instance. */ + + label: string; + /** Msg json encode message to be passed to the contract on instantiation */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; + /** Salt is an arbitrary value provided by the sender. Size can be 1 to 64. */ + + salt: Uint8Array; + /** + * FixMsg include the msg value into the hash for the predictable address. + * Default is false + */ + + fixMsg: boolean; +} +/** + * InstantiateContract2Proposal gov proposal content type to instantiate + * contract 2 + */ + +export interface InstantiateContract2ProposalSDKType { + title: string; + description: string; + run_as: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + salt: Uint8Array; + fix_msg: boolean; +} +/** MigrateContractProposal gov proposal content type to migrate a contract. */ + +export interface MigrateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** Contract is the address of the smart contract */ + + contract: string; + /** CodeID references the new WASM code */ + + codeId: Long; + /** Msg json encoded message to be passed to the contract on migration */ + + msg: Uint8Array; +} +/** MigrateContractProposal gov proposal content type to migrate a contract. */ + +export interface MigrateContractProposalSDKType { + title: string; + description: string; + contract: string; + code_id: Long; + msg: Uint8Array; +} +/** SudoContractProposal gov proposal content type to call sudo on a contract. */ + +export interface SudoContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** Contract is the address of the smart contract */ + + contract: string; + /** Msg json encoded message to be passed to the contract as sudo */ + + msg: Uint8Array; +} +/** SudoContractProposal gov proposal content type to call sudo on a contract. */ + +export interface SudoContractProposalSDKType { + title: string; + description: string; + contract: string; + msg: Uint8Array; +} +/** + * ExecuteContractProposal gov proposal content type to call execute on a + * contract. 
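// A sketch of the migrate proposal above; the contract address and migrate msg
// are placeholders, with the msg JSON-encoded to bytes as the field docs describe.
import { Long } from "../../../helpers";
import { MigrateContractProposal } from "./proposal";

const migrate = MigrateContractProposal.fromPartial({
  title: "Migrate my-contract to code 42",
  description: "Upgrade the contract to the newly stored code id",
  contract: "wasm1...",                                    // placeholder bech32 address
  codeId: Long.fromNumber(42, true),
  msg: new TextEncoder().encode(JSON.stringify({ migrate: {} })),
});
const migrateBytes = MigrateContractProposal.encode(migrate).finish();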
+ */ + +export interface ExecuteContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + + runAs: string; + /** Contract is the address of the smart contract */ + + contract: string; + /** Msg json encoded message to be passed to the contract as execute */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; +} +/** + * ExecuteContractProposal gov proposal content type to call execute on a + * contract. + */ + +export interface ExecuteContractProposalSDKType { + title: string; + description: string; + run_as: string; + contract: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** UpdateAdminProposal gov proposal content type to set an admin for a contract. */ + +export interface UpdateAdminProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** NewAdmin address to be set */ + + newAdmin: string; + /** Contract is the address of the smart contract */ + + contract: string; +} +/** UpdateAdminProposal gov proposal content type to set an admin for a contract. */ + +export interface UpdateAdminProposalSDKType { + title: string; + description: string; + new_admin: string; + contract: string; +} +/** + * ClearAdminProposal gov proposal content type to clear the admin of a + * contract. + */ + +export interface ClearAdminProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** Contract is the address of the smart contract */ + + contract: string; +} +/** + * ClearAdminProposal gov proposal content type to clear the admin of a + * contract. + */ + +export interface ClearAdminProposalSDKType { + title: string; + description: string; + contract: string; +} +/** + * PinCodesProposal gov proposal content type to pin a set of code ids in the + * wasmvm cache. + */ + +export interface PinCodesProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** CodeIDs references the new WASM codes */ + + codeIds: Long[]; +} +/** + * PinCodesProposal gov proposal content type to pin a set of code ids in the + * wasmvm cache. + */ + +export interface PinCodesProposalSDKType { + title: string; + description: string; + code_ids: Long[]; +} +/** + * UnpinCodesProposal gov proposal content type to unpin a set of code ids in + * the wasmvm cache. + */ + +export interface UnpinCodesProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** CodeIDs references the WASM codes */ + + codeIds: Long[]; +} +/** + * UnpinCodesProposal gov proposal content type to unpin a set of code ids in + * the wasmvm cache. + */ + +export interface UnpinCodesProposalSDKType { + title: string; + description: string; + code_ids: Long[]; +} +/** + * AccessConfigUpdate contains the code id and the access config to be + * applied. + */ + +export interface AccessConfigUpdate { + /** CodeID is the reference to the stored WASM code to be updated */ + codeId: Long; + /** InstantiatePermission to apply to the set of code ids */ + + instantiatePermission?: AccessConfig; +} +/** + * AccessConfigUpdate contains the code id and the access config to be + * applied. 
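// A sketch of the pin-codes proposal above, showing the repeated uint64 code
// ids as Long values. It assumes the same fromPartial pattern as the other
// proposal types in this file; the ids themselves are illustrative.
import { Long } from "../../../helpers";
import { PinCodesProposal } from "./proposal";

const pin = PinCodesProposal.fromPartial({
  title: "Pin frequently used codes",
  description: "Keep code ids 1 and 2 in the wasmvm cache",
  codeIds: [Long.fromNumber(1, true), Long.fromNumber(2, true)],
});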
+ */ + +export interface AccessConfigUpdateSDKType { + code_id: Long; + instantiate_permission?: AccessConfigSDKType; +} +/** + * UpdateInstantiateConfigProposal gov proposal content type to update + * instantiate config to a set of code ids. + */ + +export interface UpdateInstantiateConfigProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** + * AccessConfigUpdate contains the list of code ids and the access config + * to be applied. + */ + + accessConfigUpdates: AccessConfigUpdate[]; +} +/** + * UpdateInstantiateConfigProposal gov proposal content type to update + * instantiate config to a set of code ids. + */ + +export interface UpdateInstantiateConfigProposalSDKType { + title: string; + description: string; + access_config_updates: AccessConfigUpdateSDKType[]; +} +/** + * StoreAndInstantiateContractProposal gov proposal content type to store + * and instantiate the contract. + */ + +export interface StoreAndInstantiateContractProposal { + /** Title is a short summary */ + title: string; + /** Description is a human readable text */ + + description: string; + /** RunAs is the address that is passed to the contract's environment as sender */ + + runAs: string; + /** WASMByteCode can be raw or gzip compressed */ + + wasmByteCode: Uint8Array; + /** InstantiatePermission to apply on contract creation, optional */ + + instantiatePermission?: AccessConfig; + /** UnpinCode code on upload, optional */ + + unpinCode: boolean; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** Label is optional metadata to be stored with a constract instance. */ + + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; + /** Source is the URL where the code is hosted */ + + source: string; + /** + * Builder is the docker image used to build the code deterministically, used + * for smart contract verification + */ + + builder: string; + /** + * CodeHash is the SHA256 sum of the code outputted by builder, used for smart + * contract verification + */ + + codeHash: Uint8Array; +} +/** + * StoreAndInstantiateContractProposal gov proposal content type to store + * and instantiate the contract. 
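// A sketch of the store-and-instantiate proposal above, again assuming the
// common fromPartial pattern. The address, bytecode, label, init msg, and funds
// are placeholders, and the optional fields are simply omitted.
import { StoreAndInstantiateContractProposal } from "./proposal";

const storeAndInit = StoreAndInstantiateContractProposal.fromPartial({
  title: "Store and instantiate my-contract",
  description: "Upload the wasm and create a single instance in one proposal",
  runAs: "wasm1...",                                       // placeholder address
  wasmByteCode: new Uint8Array(),                          // placeholder: raw or gzipped wasm in practice
  label: "my-contract",
  msg: new TextEncoder().encode(JSON.stringify({ count: 0 })), // placeholder init msg
  funds: [{ denom: "ustake", amount: "1" }],
});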
+ */ + +export interface StoreAndInstantiateContractProposalSDKType { + title: string; + description: string; + run_as: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; + unpin_code: boolean; + admin: string; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + source: string; + builder: string; + code_hash: Uint8Array; +} + +function createBaseStoreCodeProposal(): StoreCodeProposal { + return { + title: "", + description: "", + runAs: "", + wasmByteCode: new Uint8Array(), + instantiatePermission: undefined, + unpinCode: false, + source: "", + builder: "", + codeHash: new Uint8Array() + }; +} + +export const StoreCodeProposal = { + encode(message: StoreCodeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.runAs !== "") { + writer.uint32(26).string(message.runAs); + } + + if (message.wasmByteCode.length !== 0) { + writer.uint32(34).bytes(message.wasmByteCode); + } + + if (message.instantiatePermission !== undefined) { + AccessConfig.encode(message.instantiatePermission, writer.uint32(58).fork()).ldelim(); + } + + if (message.unpinCode === true) { + writer.uint32(64).bool(message.unpinCode); + } + + if (message.source !== "") { + writer.uint32(74).string(message.source); + } + + if (message.builder !== "") { + writer.uint32(82).string(message.builder); + } + + if (message.codeHash.length !== 0) { + writer.uint32(90).bytes(message.codeHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StoreCodeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreCodeProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.runAs = reader.string(); + break; + + case 4: + message.wasmByteCode = reader.bytes(); + break; + + case 7: + message.instantiatePermission = AccessConfig.decode(reader, reader.uint32()); + break; + + case 8: + message.unpinCode = reader.bool(); + break; + + case 9: + message.source = reader.string(); + break; + + case 10: + message.builder = reader.string(); + break; + + case 11: + message.codeHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StoreCodeProposal { + const message = createBaseStoreCodeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.runAs = object.runAs ?? ""; + message.wasmByteCode = object.wasmByteCode ?? new Uint8Array(); + message.instantiatePermission = object.instantiatePermission !== undefined && object.instantiatePermission !== null ? AccessConfig.fromPartial(object.instantiatePermission) : undefined; + message.unpinCode = object.unpinCode ?? false; + message.source = object.source ?? ""; + message.builder = object.builder ?? ""; + message.codeHash = object.codeHash ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseInstantiateContractProposal(): InstantiateContractProposal { + return { + title: "", + description: "", + runAs: "", + admin: "", + codeId: Long.UZERO, + label: "", + msg: new Uint8Array(), + funds: [] + }; +} + +export const InstantiateContractProposal = { + encode(message: InstantiateContractProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.runAs !== "") { + writer.uint32(26).string(message.runAs); + } + + if (message.admin !== "") { + writer.uint32(34).string(message.admin); + } + + if (!message.codeId.isZero()) { + writer.uint32(40).uint64(message.codeId); + } + + if (message.label !== "") { + writer.uint32(50).string(message.label); + } + + if (message.msg.length !== 0) { + writer.uint32(58).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InstantiateContractProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInstantiateContractProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.runAs = reader.string(); + break; + + case 4: + message.admin = reader.string(); + break; + + case 5: + message.codeId = (reader.uint64() as Long); + break; + + case 6: + message.label = reader.string(); + break; + + case 7: + message.msg = reader.bytes(); + break; + + case 8: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InstantiateContractProposal { + const message = createBaseInstantiateContractProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.runAs = object.runAs ?? ""; + message.admin = object.admin ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.label = object.label ?? ""; + message.msg = object.msg ?? 
new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseInstantiateContract2Proposal(): InstantiateContract2Proposal { + return { + title: "", + description: "", + runAs: "", + admin: "", + codeId: Long.UZERO, + label: "", + msg: new Uint8Array(), + funds: [], + salt: new Uint8Array(), + fixMsg: false + }; +} + +export const InstantiateContract2Proposal = { + encode(message: InstantiateContract2Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.runAs !== "") { + writer.uint32(26).string(message.runAs); + } + + if (message.admin !== "") { + writer.uint32(34).string(message.admin); + } + + if (!message.codeId.isZero()) { + writer.uint32(40).uint64(message.codeId); + } + + if (message.label !== "") { + writer.uint32(50).string(message.label); + } + + if (message.msg.length !== 0) { + writer.uint32(58).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + if (message.salt.length !== 0) { + writer.uint32(74).bytes(message.salt); + } + + if (message.fixMsg === true) { + writer.uint32(80).bool(message.fixMsg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InstantiateContract2Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInstantiateContract2Proposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.runAs = reader.string(); + break; + + case 4: + message.admin = reader.string(); + break; + + case 5: + message.codeId = (reader.uint64() as Long); + break; + + case 6: + message.label = reader.string(); + break; + + case 7: + message.msg = reader.bytes(); + break; + + case 8: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + case 9: + message.salt = reader.bytes(); + break; + + case 10: + message.fixMsg = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): InstantiateContract2Proposal { + const message = createBaseInstantiateContract2Proposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.runAs = object.runAs ?? ""; + message.admin = object.admin ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.label = object.label ?? ""; + message.msg = object.msg ?? new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + message.salt = object.salt ?? new Uint8Array(); + message.fixMsg = object.fixMsg ?? 
false; + return message; + } + +}; + +function createBaseMigrateContractProposal(): MigrateContractProposal { + return { + title: "", + description: "", + contract: "", + codeId: Long.UZERO, + msg: new Uint8Array() + }; +} + +export const MigrateContractProposal = { + encode(message: MigrateContractProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.contract !== "") { + writer.uint32(34).string(message.contract); + } + + if (!message.codeId.isZero()) { + writer.uint32(40).uint64(message.codeId); + } + + if (message.msg.length !== 0) { + writer.uint32(50).bytes(message.msg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MigrateContractProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMigrateContractProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 4: + message.contract = reader.string(); + break; + + case 5: + message.codeId = (reader.uint64() as Long); + break; + + case 6: + message.msg = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MigrateContractProposal { + const message = createBaseMigrateContractProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.contract = object.contract ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.msg = object.msg ?? new Uint8Array(); + return message; + } + +}; + +function createBaseSudoContractProposal(): SudoContractProposal { + return { + title: "", + description: "", + contract: "", + msg: new Uint8Array() + }; +} + +export const SudoContractProposal = { + encode(message: SudoContractProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.contract !== "") { + writer.uint32(26).string(message.contract); + } + + if (message.msg.length !== 0) { + writer.uint32(34).bytes(message.msg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SudoContractProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSudoContractProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.contract = reader.string(); + break; + + case 4: + message.msg = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SudoContractProposal { + const message = createBaseSudoContractProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? 
""; + message.contract = object.contract ?? ""; + message.msg = object.msg ?? new Uint8Array(); + return message; + } + +}; + +function createBaseExecuteContractProposal(): ExecuteContractProposal { + return { + title: "", + description: "", + runAs: "", + contract: "", + msg: new Uint8Array(), + funds: [] + }; +} + +export const ExecuteContractProposal = { + encode(message: ExecuteContractProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.runAs !== "") { + writer.uint32(26).string(message.runAs); + } + + if (message.contract !== "") { + writer.uint32(34).string(message.contract); + } + + if (message.msg.length !== 0) { + writer.uint32(42).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExecuteContractProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExecuteContractProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.runAs = reader.string(); + break; + + case 4: + message.contract = reader.string(); + break; + + case 5: + message.msg = reader.bytes(); + break; + + case 6: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ExecuteContractProposal { + const message = createBaseExecuteContractProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.runAs = object.runAs ?? ""; + message.contract = object.contract ?? ""; + message.msg = object.msg ?? new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseUpdateAdminProposal(): UpdateAdminProposal { + return { + title: "", + description: "", + newAdmin: "", + contract: "" + }; +} + +export const UpdateAdminProposal = { + encode(message: UpdateAdminProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.newAdmin !== "") { + writer.uint32(26).string(message.newAdmin); + } + + if (message.contract !== "") { + writer.uint32(34).string(message.contract); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UpdateAdminProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUpdateAdminProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.newAdmin = reader.string(); + break; + + case 4: + message.contract = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UpdateAdminProposal { + const message = createBaseUpdateAdminProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.newAdmin = object.newAdmin ?? ""; + message.contract = object.contract ?? ""; + return message; + } + +}; + +function createBaseClearAdminProposal(): ClearAdminProposal { + return { + title: "", + description: "", + contract: "" + }; +} + +export const ClearAdminProposal = { + encode(message: ClearAdminProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.contract !== "") { + writer.uint32(26).string(message.contract); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClearAdminProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClearAdminProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.contract = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClearAdminProposal { + const message = createBaseClearAdminProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.contract = object.contract ?? ""; + return message; + } + +}; + +function createBasePinCodesProposal(): PinCodesProposal { + return { + title: "", + description: "", + codeIds: [] + }; +} + +export const PinCodesProposal = { + encode(message: PinCodesProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + writer.uint32(26).fork(); + + for (const v of message.codeIds) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PinCodesProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePinCodesProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.codeIds.push((reader.uint64() as Long)); + } + } else { + message.codeIds.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PinCodesProposal { + const message = createBasePinCodesProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.codeIds = object.codeIds?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseUnpinCodesProposal(): UnpinCodesProposal { + return { + title: "", + description: "", + codeIds: [] + }; +} + +export const UnpinCodesProposal = { + encode(message: UnpinCodesProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + writer.uint32(26).fork(); + + for (const v of message.codeIds) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UnpinCodesProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUnpinCodesProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.codeIds.push((reader.uint64() as Long)); + } + } else { + message.codeIds.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UnpinCodesProposal { + const message = createBaseUnpinCodesProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.codeIds = object.codeIds?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseAccessConfigUpdate(): AccessConfigUpdate { + return { + codeId: Long.UZERO, + instantiatePermission: undefined + }; +} + +export const AccessConfigUpdate = { + encode(message: AccessConfigUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.instantiatePermission !== undefined) { + AccessConfig.encode(message.instantiatePermission, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccessConfigUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAccessConfigUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.instantiatePermission = AccessConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccessConfigUpdate { + const message = createBaseAccessConfigUpdate(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.instantiatePermission = object.instantiatePermission !== undefined && object.instantiatePermission !== null ? AccessConfig.fromPartial(object.instantiatePermission) : undefined; + return message; + } + +}; + +function createBaseUpdateInstantiateConfigProposal(): UpdateInstantiateConfigProposal { + return { + title: "", + description: "", + accessConfigUpdates: [] + }; +} + +export const UpdateInstantiateConfigProposal = { + encode(message: UpdateInstantiateConfigProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + for (const v of message.accessConfigUpdates) { + AccessConfigUpdate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UpdateInstantiateConfigProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUpdateInstantiateConfigProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.accessConfigUpdates.push(AccessConfigUpdate.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UpdateInstantiateConfigProposal { + const message = createBaseUpdateInstantiateConfigProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? 
""; + message.accessConfigUpdates = object.accessConfigUpdates?.map(e => AccessConfigUpdate.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseStoreAndInstantiateContractProposal(): StoreAndInstantiateContractProposal { + return { + title: "", + description: "", + runAs: "", + wasmByteCode: new Uint8Array(), + instantiatePermission: undefined, + unpinCode: false, + admin: "", + label: "", + msg: new Uint8Array(), + funds: [], + source: "", + builder: "", + codeHash: new Uint8Array() + }; +} + +export const StoreAndInstantiateContractProposal = { + encode(message: StoreAndInstantiateContractProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.runAs !== "") { + writer.uint32(26).string(message.runAs); + } + + if (message.wasmByteCode.length !== 0) { + writer.uint32(34).bytes(message.wasmByteCode); + } + + if (message.instantiatePermission !== undefined) { + AccessConfig.encode(message.instantiatePermission, writer.uint32(42).fork()).ldelim(); + } + + if (message.unpinCode === true) { + writer.uint32(48).bool(message.unpinCode); + } + + if (message.admin !== "") { + writer.uint32(58).string(message.admin); + } + + if (message.label !== "") { + writer.uint32(66).string(message.label); + } + + if (message.msg.length !== 0) { + writer.uint32(74).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(82).fork()).ldelim(); + } + + if (message.source !== "") { + writer.uint32(90).string(message.source); + } + + if (message.builder !== "") { + writer.uint32(98).string(message.builder); + } + + if (message.codeHash.length !== 0) { + writer.uint32(106).bytes(message.codeHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StoreAndInstantiateContractProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreAndInstantiateContractProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.runAs = reader.string(); + break; + + case 4: + message.wasmByteCode = reader.bytes(); + break; + + case 5: + message.instantiatePermission = AccessConfig.decode(reader, reader.uint32()); + break; + + case 6: + message.unpinCode = reader.bool(); + break; + + case 7: + message.admin = reader.string(); + break; + + case 8: + message.label = reader.string(); + break; + + case 9: + message.msg = reader.bytes(); + break; + + case 10: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + case 11: + message.source = reader.string(); + break; + + case 12: + message.builder = reader.string(); + break; + + case 13: + message.codeHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): StoreAndInstantiateContractProposal { + const message = createBaseStoreAndInstantiateContractProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.runAs = object.runAs ?? ""; + message.wasmByteCode = object.wasmByteCode ?? 
new Uint8Array(); + message.instantiatePermission = object.instantiatePermission !== undefined && object.instantiatePermission !== null ? AccessConfig.fromPartial(object.instantiatePermission) : undefined; + message.unpinCode = object.unpinCode ?? false; + message.admin = object.admin ?? ""; + message.label = object.label ?? ""; + message.msg = object.msg ?? new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + message.source = object.source ?? ""; + message.builder = object.builder ?? ""; + message.codeHash = object.codeHash ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/query.lcd.ts b/packages/codegen/src/cosmwasm/wasm/v1/query.lcd.ts new file mode 100644 index 00000000..8c8ac583 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/query.lcd.ts @@ -0,0 +1,155 @@ +import { setPaginationParams } from "../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryContractInfoRequest, QueryContractInfoResponseSDKType, QueryContractHistoryRequest, QueryContractHistoryResponseSDKType, QueryContractsByCodeRequest, QueryContractsByCodeResponseSDKType, QueryAllContractStateRequest, QueryAllContractStateResponseSDKType, QueryRawContractStateRequest, QueryRawContractStateResponseSDKType, QuerySmartContractStateRequest, QuerySmartContractStateResponseSDKType, QueryCodeRequest, QueryCodeResponseSDKType, QueryCodesRequest, QueryCodesResponseSDKType, QueryPinnedCodesRequest, QueryPinnedCodesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType, QueryContractsByCreatorRequest, QueryContractsByCreatorResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.contractInfo = this.contractInfo.bind(this); + this.contractHistory = this.contractHistory.bind(this); + this.contractsByCode = this.contractsByCode.bind(this); + this.allContractState = this.allContractState.bind(this); + this.rawContractState = this.rawContractState.bind(this); + this.smartContractState = this.smartContractState.bind(this); + this.code = this.code.bind(this); + this.codes = this.codes.bind(this); + this.pinnedCodes = this.pinnedCodes.bind(this); + this.params = this.params.bind(this); + this.contractsByCreator = this.contractsByCreator.bind(this); + } + /* ContractInfo gets the contract meta data */ + + + async contractInfo(params: QueryContractInfoRequest): Promise { + const endpoint = `cosmwasm/wasm/v1/contract/${params.address}`; + return await this.req.get(endpoint); + } + /* ContractHistory gets the contract code history */ + + + async contractHistory(params: QueryContractHistoryRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/contract/${params.address}/history`; + return await this.req.get(endpoint, options); + } + /* ContractsByCode lists all smart contracts for a code id */ + + + async contractsByCode(params: QueryContractsByCodeRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/code/${params.codeId}/contracts`; + return await this.req.get(endpoint, options); + } + /* AllContractState gets all raw store data for a single contract */ 
+ + + async allContractState(params: QueryAllContractStateRequest): Promise<QueryAllContractStateResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/contract/${params.address}/state`; + return await this.req.get(endpoint, options); + } + /* RawContractState gets a single key from the raw store data of a contract */ + + + async rawContractState(params: QueryRawContractStateRequest): Promise<QueryRawContractStateResponseSDKType> { + const endpoint = `cosmwasm/wasm/v1/contract/${params.address}/raw/${params.queryData}`; + return await this.req.get(endpoint); + } + /* SmartContractState gets the smart query result from the contract */ + + + async smartContractState(params: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponseSDKType> { + const endpoint = `cosmwasm/wasm/v1/contract/${params.address}/smart/${params.queryData}`; + return await this.req.get(endpoint); + } + /* Code gets the binary code and metadata for a single wasm code */ + + + async code(params: QueryCodeRequest): Promise<QueryCodeResponseSDKType> { + const endpoint = `cosmwasm/wasm/v1/code/${params.codeId}`; + return await this.req.get(endpoint); + } + /* Codes gets the metadata for all stored wasm codes */ + + + async codes(params: QueryCodesRequest = { + pagination: undefined + }): Promise<QueryCodesResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/code`; + return await this.req.get(endpoint, options); + } + /* PinnedCodes gets the pinned code ids */ + + + async pinnedCodes(params: QueryPinnedCodesRequest = { + pagination: undefined + }): Promise<QueryPinnedCodesResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/codes/pinned`; + return await this.req.get(endpoint, options); + } + /* Params gets the module params */ + + + async params(_params: QueryParamsRequest = {}): Promise<QueryParamsResponseSDKType> { + const endpoint = `cosmwasm/wasm/v1/codes/params`; + return await this.req.get(endpoint); + } + /* ContractsByCreator gets the contracts by creator */ + + + async contractsByCreator(params: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `cosmwasm/wasm/v1/contracts/creator/${params.creatorAddress}`; + return await this.req.get(endpoint, options); + } + +} \ No newline at end of file
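For orientation, here is a minimal usage sketch for the generated LCD query client above. It is illustrative only: the REST endpoint URL and the contract address are placeholders, and it assumes the LCDClient exported by @osmonauts/lcd accepts a restEndpoint option in its constructor.

import { LCDClient } from "@osmonauts/lcd";
import { LCDQueryClient } from "./query.lcd";

async function main() {
  // Placeholder endpoint; point this at a real LCD/REST endpoint (assumption).
  const requestClient = new LCDClient({ restEndpoint: "http://localhost:1317" });
  const client = new LCDQueryClient({ requestClient });

  // GET cosmwasm/wasm/v1/contract/{address} -- contract metadata.
  const info = await client.contractInfo({ address: "wasm1placeholderaddress" });

  // GET cosmwasm/wasm/v1/code -- paginated list of stored code metadata.
  const codes = await client.codes({ pagination: undefined });
  console.log(info, codes);
}

main();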
diff --git a/packages/codegen/src/cosmwasm/wasm/v1/query.rpc.Query.ts b/packages/codegen/src/cosmwasm/wasm/v1/query.rpc.Query.ts new file mode 100644 index 00000000..2ba1edee --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/query.rpc.Query.ts @@ -0,0 +1,179 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryContractInfoRequest, QueryContractInfoResponse, QueryContractHistoryRequest, QueryContractHistoryResponse, QueryContractsByCodeRequest, QueryContractsByCodeResponse, QueryAllContractStateRequest, QueryAllContractStateResponse, QueryRawContractStateRequest, QueryRawContractStateResponse, QuerySmartContractStateRequest, QuerySmartContractStateResponse, QueryCodeRequest, QueryCodeResponse, QueryCodesRequest, QueryCodesResponse, QueryPinnedCodesRequest, QueryPinnedCodesResponse, QueryParamsRequest, QueryParamsResponse, QueryContractsByCreatorRequest, QueryContractsByCreatorResponse } from "./query"; +/** Query defines the gRPC querier service */ + +export interface Query { + /** ContractInfo gets the contract meta data */ + contractInfo(request: QueryContractInfoRequest): Promise<QueryContractInfoResponse>; + /** ContractHistory gets the contract code history */ + + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse>; + /** ContractsByCode lists all smart contracts for a code id */ + + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse>; + /** AllContractState gets all raw store data for a single contract */ + + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse>; + /** RawContractState gets a single key from the raw store data of a contract */ + + rawContractState(request: QueryRawContractStateRequest): Promise<QueryRawContractStateResponse>; + /** SmartContractState gets the smart query result from the contract */ + + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse>; + /** Code gets the binary code and metadata for a single wasm code */ + + code(request: QueryCodeRequest): Promise<QueryCodeResponse>; + /** Codes gets the metadata for all stored wasm codes */ + + codes(request?: QueryCodesRequest): Promise<QueryCodesResponse>; + /** PinnedCodes gets the pinned code ids */ + + pinnedCodes(request?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponse>; + /** Params gets the module params */ + + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; + /** ContractsByCreator gets the contracts by creator */ + + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse>; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.contractInfo = this.contractInfo.bind(this); + this.contractHistory = this.contractHistory.bind(this); + this.contractsByCode = this.contractsByCode.bind(this); + this.allContractState = this.allContractState.bind(this); + this.rawContractState = this.rawContractState.bind(this); + this.smartContractState = this.smartContractState.bind(this); + this.code = this.code.bind(this); + this.codes = this.codes.bind(this); + this.pinnedCodes = this.pinnedCodes.bind(this); + this.params = this.params.bind(this); + this.contractsByCreator = this.contractsByCreator.bind(this); + } + + contractInfo(request: QueryContractInfoRequest): Promise<QueryContractInfoResponse> { + const data = QueryContractInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "ContractInfo", data); + return promise.then(data => QueryContractInfoResponse.decode(new _m0.Reader(data))); + } + + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse> { + const data = QueryContractHistoryRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "ContractHistory", data); + return promise.then(data => QueryContractHistoryResponse.decode(new _m0.Reader(data))); + } + + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse> { + const data = QueryContractsByCodeRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "ContractsByCode", data); + return promise.then(data => QueryContractsByCodeResponse.decode(new _m0.Reader(data))); + } + + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse> { + const data = QueryAllContractStateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "AllContractState", data); + return promise.then(data => QueryAllContractStateResponse.decode(new _m0.Reader(data))); + } + + rawContractState(request:
QueryRawContractStateRequest): Promise<QueryRawContractStateResponse> { + const data = QueryRawContractStateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "RawContractState", data); + return promise.then(data => QueryRawContractStateResponse.decode(new _m0.Reader(data))); + } + + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse> { + const data = QuerySmartContractStateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "SmartContractState", data); + return promise.then(data => QuerySmartContractStateResponse.decode(new _m0.Reader(data))); + } + + code(request: QueryCodeRequest): Promise<QueryCodeResponse> { + const data = QueryCodeRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "Code", data); + return promise.then(data => QueryCodeResponse.decode(new _m0.Reader(data))); + } + + codes(request: QueryCodesRequest = { + pagination: undefined + }): Promise<QueryCodesResponse> { + const data = QueryCodesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "Codes", data); + return promise.then(data => QueryCodesResponse.decode(new _m0.Reader(data))); + } + + pinnedCodes(request: QueryPinnedCodesRequest = { + pagination: undefined + }): Promise<QueryPinnedCodesResponse> { + const data = QueryPinnedCodesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "PinnedCodes", data); + return promise.then(data => QueryPinnedCodesResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest = {}): Promise<QueryParamsResponse> { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse> { + const data = QueryContractsByCreatorRequest.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Query", "ContractsByCreator", data); + return promise.then(data => QueryContractsByCreatorResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + contractInfo(request: QueryContractInfoRequest): Promise<QueryContractInfoResponse> { + return queryService.contractInfo(request); + }, + + contractHistory(request: QueryContractHistoryRequest): Promise<QueryContractHistoryResponse> { + return queryService.contractHistory(request); + }, + + contractsByCode(request: QueryContractsByCodeRequest): Promise<QueryContractsByCodeResponse> { + return queryService.contractsByCode(request); + }, + + allContractState(request: QueryAllContractStateRequest): Promise<QueryAllContractStateResponse> { + return queryService.allContractState(request); + }, + + rawContractState(request: QueryRawContractStateRequest): Promise<QueryRawContractStateResponse> { + return queryService.rawContractState(request); + }, + + smartContractState(request: QuerySmartContractStateRequest): Promise<QuerySmartContractStateResponse> { + return queryService.smartContractState(request); + }, + + code(request: QueryCodeRequest): Promise<QueryCodeResponse> { + return queryService.code(request); + }, + + codes(request?: QueryCodesRequest): Promise<QueryCodesResponse> { + return queryService.codes(request); + }, + + pinnedCodes(request?: QueryPinnedCodesRequest): Promise<QueryPinnedCodesResponse> { + return queryService.pinnedCodes(request); + }, + + params(request?: QueryParamsRequest): Promise<QueryParamsResponse> { + return queryService.params(request); + }, + + contractsByCreator(request: QueryContractsByCreatorRequest): Promise<QueryContractsByCreatorResponse> { + return queryService.contractsByCreator(request); + } + + }; +}; \ No newline at end of file
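Similarly, a minimal sketch of wiring the generated RPC querier above into a @cosmjs/stargate QueryClient. The Tendermint RPC URL and the contract address are placeholders, and the choice of Tendermint34Client from @cosmjs/tendermint-rpc is an assumption about the consumer's setup rather than part of the generated code.

import { Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { QueryClient } from "@cosmjs/stargate";
import { createRpcQueryExtension } from "./query.rpc.Query";

async function main() {
  // Placeholder RPC endpoint; substitute a real Tendermint RPC URL (assumption).
  const tmClient = await Tendermint34Client.connect("http://localhost:26657");
  const queryClient = new QueryClient(tmClient);

  // createRpcQueryExtension wraps QueryClientImpl over a protobuf RPC client.
  const wasm = createRpcQueryExtension(queryClient);

  const { codeInfos } = await wasm.codes();
  const info = await wasm.contractInfo({ address: "wasm1placeholderaddress" });
  console.log(codeInfos, info);
}

main();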
diff --git a/packages/codegen/src/cosmwasm/wasm/v1/query.ts b/packages/codegen/src/cosmwasm/wasm/v1/query.ts new file mode 100644 index 00000000..a1fe5361 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/query.ts @@ -0,0 +1,1606 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../cosmos/base/query/v1beta1/pagination"; +import { ContractInfo, ContractInfoSDKType, ContractCodeHistoryEntry, ContractCodeHistoryEntrySDKType, Model, ModelSDKType, AccessConfig, AccessConfigSDKType, Params, ParamsSDKType } from "./types"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** + * QueryContractInfoRequest is the request type for the Query/ContractInfo RPC + * method + */ + +export interface QueryContractInfoRequest { + /** address is the address of the contract to query */ + address: string; +} +/** + * QueryContractInfoRequest is the request type for the Query/ContractInfo RPC + * method + */ + +export interface QueryContractInfoRequestSDKType { + address: string; +} +/** + * QueryContractInfoResponse is the response type for the Query/ContractInfo RPC + * method + */ + +export interface QueryContractInfoResponse { + /** address is the address of the contract */ + address: string; + contractInfo?: ContractInfo; +} +/** + * QueryContractInfoResponse is the response type for the Query/ContractInfo RPC + * method + */ + +export interface QueryContractInfoResponseSDKType { + address: string; + contract_info?: ContractInfoSDKType; +} +/** + * QueryContractHistoryRequest is the request type for the Query/ContractHistory + * RPC method + */ + +export interface QueryContractHistoryRequest { + /** address is the address of the contract to query */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryContractHistoryRequest is the request type for the Query/ContractHistory + * RPC method + */ + +export interface QueryContractHistoryRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryContractHistoryResponse is the response type for the + * Query/ContractHistory RPC method + */ + +export interface QueryContractHistoryResponse { + entries: ContractCodeHistoryEntry[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryContractHistoryResponse is the response type for the + * Query/ContractHistory RPC method + */ + +export interface QueryContractHistoryResponseSDKType { + entries: ContractCodeHistoryEntrySDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryContractsByCodeRequest is the request type for the Query/ContractsByCode + * RPC method + */ + +export interface QueryContractsByCodeRequest { + /** + * grpc-gateway_out does not support Go style CodID + * pagination defines an optional pagination for the request.
+ */ + codeId: Long; + pagination?: PageRequest; +} +/** + * QueryContractsByCodeRequest is the request type for the Query/ContractsByCode + * RPC method + */ + +export interface QueryContractsByCodeRequestSDKType { + code_id: Long; + pagination?: PageRequestSDKType; +} +/** + * QueryContractsByCodeResponse is the response type for the + * Query/ContractsByCode RPC method + */ + +export interface QueryContractsByCodeResponse { + /** contracts are a set of contract addresses */ + contracts: string[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryContractsByCodeResponse is the response type for the + * Query/ContractsByCode RPC method + */ + +export interface QueryContractsByCodeResponseSDKType { + contracts: string[]; + pagination?: PageResponseSDKType; +} +/** + * QueryAllContractStateRequest is the request type for the + * Query/AllContractState RPC method + */ + +export interface QueryAllContractStateRequest { + /** address is the address of the contract */ + address: string; + /** pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryAllContractStateRequest is the request type for the + * Query/AllContractState RPC method + */ + +export interface QueryAllContractStateRequestSDKType { + address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryAllContractStateResponse is the response type for the + * Query/AllContractState RPC method + */ + +export interface QueryAllContractStateResponse { + models: Model[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryAllContractStateResponse is the response type for the + * Query/AllContractState RPC method + */ + +export interface QueryAllContractStateResponseSDKType { + models: ModelSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryRawContractStateRequest is the request type for the + * Query/RawContractState RPC method + */ + +export interface QueryRawContractStateRequest { + /** address is the address of the contract */ + address: string; + queryData: Uint8Array; +} +/** + * QueryRawContractStateRequest is the request type for the + * Query/RawContractState RPC method + */ + +export interface QueryRawContractStateRequestSDKType { + address: string; + query_data: Uint8Array; +} +/** + * QueryRawContractStateResponse is the response type for the + * Query/RawContractState RPC method + */ + +export interface QueryRawContractStateResponse { + /** Data contains the raw store data */ + data: Uint8Array; +} +/** + * QueryRawContractStateResponse is the response type for the + * Query/RawContractState RPC method + */ + +export interface QueryRawContractStateResponseSDKType { + data: Uint8Array; +} +/** + * QuerySmartContractStateRequest is the request type for the + * Query/SmartContractState RPC method + */ + +export interface QuerySmartContractStateRequest { + /** address is the address of the contract */ + address: string; + /** QueryData contains the query data passed to the contract */ + + queryData: Uint8Array; +} +/** + * QuerySmartContractStateRequest is the request type for the + * Query/SmartContractState RPC method + */ + +export interface QuerySmartContractStateRequestSDKType { + address: string; + query_data: Uint8Array; +} +/** + * QuerySmartContractStateResponse is the response type for the + * Query/SmartContractState RPC method + */ + +export interface QuerySmartContractStateResponse { + /** Data contains the json data returned from the 
smart contract */ + data: Uint8Array; +} +/** + * QuerySmartContractStateResponse is the response type for the + * Query/SmartContractState RPC method + */ + +export interface QuerySmartContractStateResponseSDKType { + data: Uint8Array; +} +/** QueryCodeRequest is the request type for the Query/Code RPC method */ + +export interface QueryCodeRequest { + /** grpc-gateway_out does not support Go style CodID */ + codeId: Long; +} +/** QueryCodeRequest is the request type for the Query/Code RPC method */ + +export interface QueryCodeRequestSDKType { + code_id: Long; +} +/** CodeInfoResponse contains code meta data from CodeInfo */ + +export interface CodeInfoResponse { + codeId: Long; + creator: string; + dataHash: Uint8Array; + instantiatePermission?: AccessConfig; +} +/** CodeInfoResponse contains code meta data from CodeInfo */ + +export interface CodeInfoResponseSDKType { + code_id: Long; + creator: string; + data_hash: Uint8Array; + instantiate_permission?: AccessConfigSDKType; +} +/** QueryCodeResponse is the response type for the Query/Code RPC method */ + +export interface QueryCodeResponse { + codeInfo?: CodeInfoResponse; + data: Uint8Array; +} +/** QueryCodeResponse is the response type for the Query/Code RPC method */ + +export interface QueryCodeResponseSDKType { + code_info?: CodeInfoResponseSDKType; + data: Uint8Array; +} +/** QueryCodesRequest is the request type for the Query/Codes RPC method */ + +export interface QueryCodesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** QueryCodesRequest is the request type for the Query/Codes RPC method */ + +export interface QueryCodesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryCodesResponse is the response type for the Query/Codes RPC method */ + +export interface QueryCodesResponse { + codeInfos: CodeInfoResponse[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** QueryCodesResponse is the response type for the Query/Codes RPC method */ + +export interface QueryCodesResponseSDKType { + code_infos: CodeInfoResponseSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryPinnedCodesRequest is the request type for the Query/PinnedCodes + * RPC method + */ + +export interface QueryPinnedCodesRequest { + /** pagination defines an optional pagination for the request. */ + pagination?: PageRequest; +} +/** + * QueryPinnedCodesRequest is the request type for the Query/PinnedCodes + * RPC method + */ + +export interface QueryPinnedCodesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryPinnedCodesResponse is the response type for the + * Query/PinnedCodes RPC method + */ + +export interface QueryPinnedCodesResponse { + codeIds: Long[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryPinnedCodesResponse is the response type for the + * Query/PinnedCodes RPC method + */ + +export interface QueryPinnedCodesResponseSDKType { + code_ids: Long[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params defines the parameters of the module. 
*/ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryContractsByCreatorRequest is the request type for the + * Query/ContractsByCreator RPC method. + */ + +export interface QueryContractsByCreatorRequest { + /** CreatorAddress is the address of contract creator */ + creatorAddress: string; + /** Pagination defines an optional pagination for the request. */ + + pagination?: PageRequest; +} +/** + * QueryContractsByCreatorRequest is the request type for the + * Query/ContractsByCreator RPC method. + */ + +export interface QueryContractsByCreatorRequestSDKType { + creator_address: string; + pagination?: PageRequestSDKType; +} +/** + * QueryContractsByCreatorResponse is the response type for the + * Query/ContractsByCreator RPC method. + */ + +export interface QueryContractsByCreatorResponse { + /** ContractAddresses result set */ + contractAddresses: string[]; + /** Pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryContractsByCreatorResponse is the response type for the + * Query/ContractsByCreator RPC method. + */ + +export interface QueryContractsByCreatorResponseSDKType { + contract_addresses: string[]; + pagination?: PageResponseSDKType; +} + +function createBaseQueryContractInfoRequest(): QueryContractInfoRequest { + return { + address: "" + }; +} + +export const QueryContractInfoRequest = { + encode(message: QueryContractInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryContractInfoRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractInfoRequest { + const message = createBaseQueryContractInfoRequest(); + message.address = object.address ?? ""; + return message; + } + +}; + +function createBaseQueryContractInfoResponse(): QueryContractInfoResponse { + return { + address: "", + contractInfo: undefined + }; +} + +export const QueryContractInfoResponse = { + encode(message: QueryContractInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.contractInfo !== undefined) { + ContractInfo.encode(message.contractInfo, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryContractInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.contractInfo = ContractInfo.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractInfoResponse { + const message = createBaseQueryContractInfoResponse(); + message.address = object.address ?? ""; + message.contractInfo = object.contractInfo !== undefined && object.contractInfo !== null ? ContractInfo.fromPartial(object.contractInfo) : undefined; + return message; + } + +}; + +function createBaseQueryContractHistoryRequest(): QueryContractHistoryRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QueryContractHistoryRequest = { + encode(message: QueryContractHistoryRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractHistoryRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryContractHistoryRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractHistoryRequest { + const message = createBaseQueryContractHistoryRequest(); + message.address = object.address ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryContractHistoryResponse(): QueryContractHistoryResponse { + return { + entries: [], + pagination: undefined + }; +} + +export const QueryContractHistoryResponse = { + encode(message: QueryContractHistoryResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.entries) { + ContractCodeHistoryEntry.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractHistoryResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryContractHistoryResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.entries.push(ContractCodeHistoryEntry.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractHistoryResponse { + const message = createBaseQueryContractHistoryResponse(); + message.entries = object.entries?.map(e => ContractCodeHistoryEntry.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryContractsByCodeRequest(): QueryContractsByCodeRequest { + return { + codeId: Long.UZERO, + pagination: undefined + }; +} + +export const QueryContractsByCodeRequest = { + encode(message: QueryContractsByCodeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCodeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryContractsByCodeRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractsByCodeRequest { + const message = createBaseQueryContractsByCodeRequest(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryContractsByCodeResponse(): QueryContractsByCodeResponse { + return { + contracts: [], + pagination: undefined + }; +} + +export const QueryContractsByCodeResponse = { + encode(message: QueryContractsByCodeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.contracts) { + writer.uint32(10).string(v!); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCodeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryContractsByCodeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.contracts.push(reader.string()); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractsByCodeResponse { + const message = createBaseQueryContractsByCodeResponse(); + message.contracts = object.contracts?.map(e => e) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllContractStateRequest(): QueryAllContractStateRequest { + return { + address: "", + pagination: undefined + }; +} + +export const QueryAllContractStateRequest = { + encode(message: QueryAllContractStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllContractStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllContractStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllContractStateRequest { + const message = createBaseQueryAllContractStateRequest(); + message.address = object.address ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryAllContractStateResponse(): QueryAllContractStateResponse { + return { + models: [], + pagination: undefined + }; +} + +export const QueryAllContractStateResponse = { + encode(message: QueryAllContractStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.models) { + Model.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllContractStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllContractStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.models.push(Model.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAllContractStateResponse { + const message = createBaseQueryAllContractStateResponse(); + message.models = object.models?.map(e => Model.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryRawContractStateRequest(): QueryRawContractStateRequest { + return { + address: "", + queryData: new Uint8Array() + }; +} + +export const QueryRawContractStateRequest = { + encode(message: QueryRawContractStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.queryData.length !== 0) { + writer.uint32(18).bytes(message.queryData); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRawContractStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryRawContractStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.queryData = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryRawContractStateRequest { + const message = createBaseQueryRawContractStateRequest(); + message.address = object.address ?? ""; + message.queryData = object.queryData ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryRawContractStateResponse(): QueryRawContractStateResponse { + return { + data: new Uint8Array() + }; +} + +export const QueryRawContractStateResponse = { + encode(message: QueryRawContractStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryRawContractStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryRawContractStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryRawContractStateResponse { + const message = createBaseQueryRawContractStateResponse(); + message.data = object.data ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseQuerySmartContractStateRequest(): QuerySmartContractStateRequest { + return { + address: "", + queryData: new Uint8Array() + }; +} + +export const QuerySmartContractStateRequest = { + encode(message: QuerySmartContractStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.queryData.length !== 0) { + writer.uint32(18).bytes(message.queryData); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySmartContractStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySmartContractStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.queryData = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySmartContractStateRequest { + const message = createBaseQuerySmartContractStateRequest(); + message.address = object.address ?? ""; + message.queryData = object.queryData ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQuerySmartContractStateResponse(): QuerySmartContractStateResponse { + return { + data: new Uint8Array() + }; +} + +export const QuerySmartContractStateResponse = { + encode(message: QuerySmartContractStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySmartContractStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySmartContractStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QuerySmartContractStateResponse { + const message = createBaseQuerySmartContractStateResponse(); + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryCodeRequest(): QueryCodeRequest { + return { + codeId: Long.UZERO + }; +} + +export const QueryCodeRequest = { + encode(message: QueryCodeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryCodeRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCodeRequest { + const message = createBaseQueryCodeRequest(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + return message; + } + +}; + +function createBaseCodeInfoResponse(): CodeInfoResponse { + return { + codeId: Long.UZERO, + creator: "", + dataHash: new Uint8Array(), + instantiatePermission: undefined + }; +} + +export const CodeInfoResponse = { + encode(message: CodeInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + + if (message.dataHash.length !== 0) { + writer.uint32(26).bytes(message.dataHash); + } + + if (message.instantiatePermission !== undefined) { + AccessConfig.encode(message.instantiatePermission, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CodeInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCodeInfoResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.creator = reader.string(); + break; + + case 3: + message.dataHash = reader.bytes(); + break; + + case 6: + message.instantiatePermission = AccessConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CodeInfoResponse { + const message = createBaseCodeInfoResponse(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.creator = object.creator ?? ""; + message.dataHash = object.dataHash ?? new Uint8Array(); + message.instantiatePermission = object.instantiatePermission !== undefined && object.instantiatePermission !== null ? AccessConfig.fromPartial(object.instantiatePermission) : undefined; + return message; + } + +}; + +function createBaseQueryCodeResponse(): QueryCodeResponse { + return { + codeInfo: undefined, + data: new Uint8Array() + }; +} + +export const QueryCodeResponse = { + encode(message: QueryCodeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.codeInfo !== undefined) { + CodeInfoResponse.encode(message.codeInfo, writer.uint32(10).fork()).ldelim(); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryCodeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeInfo = CodeInfoResponse.decode(reader, reader.uint32()); + break; + + case 2: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCodeResponse { + const message = createBaseQueryCodeResponse(); + message.codeInfo = object.codeInfo !== undefined && object.codeInfo !== null ? CodeInfoResponse.fromPartial(object.codeInfo) : undefined; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseQueryCodesRequest(): QueryCodesRequest { + return { + pagination: undefined + }; +} + +export const QueryCodesRequest = { + encode(message: QueryCodesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCodesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCodesRequest { + const message = createBaseQueryCodesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryCodesResponse(): QueryCodesResponse { + return { + codeInfos: [], + pagination: undefined + }; +} + +export const QueryCodesResponse = { + encode(message: QueryCodesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.codeInfos) { + CodeInfoResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCodesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCodesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeInfos.push(CodeInfoResponse.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryCodesResponse { + const message = createBaseQueryCodesResponse(); + message.codeInfos = object.codeInfos?.map(e => CodeInfoResponse.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryPinnedCodesRequest(): QueryPinnedCodesRequest { + return { + pagination: undefined + }; +} + +export const QueryPinnedCodesRequest = { + encode(message: QueryPinnedCodesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPinnedCodesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPinnedCodesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPinnedCodesRequest { + const message = createBaseQueryPinnedCodesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryPinnedCodesResponse(): QueryPinnedCodesResponse { + return { + codeIds: [], + pagination: undefined + }; +} + +export const QueryPinnedCodesResponse = { + encode(message: QueryPinnedCodesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.codeIds) { + writer.uint64(v); + } + + writer.ldelim(); + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPinnedCodesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPinnedCodesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.codeIds.push((reader.uint64() as Long)); + } + } else { + message.codeIds.push((reader.uint64() as Long)); + } + + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPinnedCodesResponse { + const message = createBaseQueryPinnedCodesResponse(); + message.codeIds = object.codeIds?.map(e => Long.fromValue(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryContractsByCreatorRequest(): QueryContractsByCreatorRequest { + return { + creatorAddress: "", + pagination: undefined + }; +} + +export const QueryContractsByCreatorRequest = { + encode(message: QueryContractsByCreatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creatorAddress !== "") { + writer.uint32(10).string(message.creatorAddress); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCreatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryContractsByCreatorRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.creatorAddress = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractsByCreatorRequest { + const message = createBaseQueryContractsByCreatorRequest(); + message.creatorAddress = object.creatorAddress ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryContractsByCreatorResponse(): QueryContractsByCreatorResponse { + return { + contractAddresses: [], + pagination: undefined + }; +} + +export const QueryContractsByCreatorResponse = { + encode(message: QueryContractsByCreatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.contractAddresses) { + writer.uint32(10).string(v!); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryContractsByCreatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryContractsByCreatorResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.contractAddresses.push(reader.string()); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryContractsByCreatorResponse { + const message = createBaseQueryContractsByCreatorResponse(); + message.contractAddresses = object.contractAddresses?.map(e => e) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/tx.rpc.msg.ts b/packages/codegen/src/cosmwasm/wasm/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..4bd52e13 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/tx.rpc.msg.ts @@ -0,0 +1,90 @@ +import { Rpc } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgStoreCode, MsgStoreCodeResponse, MsgInstantiateContract, MsgInstantiateContractResponse, MsgInstantiateContract2, MsgInstantiateContract2Response, MsgExecuteContract, MsgExecuteContractResponse, MsgMigrateContract, MsgMigrateContractResponse, MsgUpdateAdmin, MsgUpdateAdminResponse, MsgClearAdmin, MsgClearAdminResponse } from "./tx"; +/** Msg defines the wasm Msg service. */ + +export interface Msg { + /** StoreCode to submit Wasm code to the system */ + storeCode(request: MsgStoreCode): Promise; + /** + * InstantiateContract creates a new smart contract instance for the given + * code id. 
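+ *
+ * Illustrative sketch (placeholders only): `rpc` stands for any object
+ * implementing the `Rpc` interface imported above, and the values passed to
+ * `fromPartial` are placeholders, not chain defaults.
+ *
+ * @example
+ * const msgClient = new MsgClientImpl(rpc);
+ * const response = await msgClient.instantiateContract(
+ *   MsgInstantiateContract.fromPartial({
+ *     sender: senderAddress,   // placeholder bech32 address
+ *     codeId: storedCodeId,    // Long code id returned by storeCode
+ *     label: "my-contract",
+ *     msg: utf8JsonInitMsg,    // Uint8Array holding the JSON init message
+ *     funds: []
+ *   })
+ * );
+ * // `response` is the decoded MsgInstantiateContractResponse carrying the
+ * // bech32 address of the new contract instance.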
+ */ + + instantiateContract(request: MsgInstantiateContract): Promise; + /** + * InstantiateContract2 creates a new smart contract instance for the given + * code id with a predictable address + */ + + instantiateContract2(request: MsgInstantiateContract2): Promise; + /** Execute submits the given message data to a smart contract */ + + executeContract(request: MsgExecuteContract): Promise; + /** Migrate runs a code upgrade/ downgrade for a smart contract */ + + migrateContract(request: MsgMigrateContract): Promise; + /** UpdateAdmin sets a new admin for a smart contract */ + + updateAdmin(request: MsgUpdateAdmin): Promise; + /** ClearAdmin removes any admin stored for a smart contract */ + + clearAdmin(request: MsgClearAdmin): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.storeCode = this.storeCode.bind(this); + this.instantiateContract = this.instantiateContract.bind(this); + this.instantiateContract2 = this.instantiateContract2.bind(this); + this.executeContract = this.executeContract.bind(this); + this.migrateContract = this.migrateContract.bind(this); + this.updateAdmin = this.updateAdmin.bind(this); + this.clearAdmin = this.clearAdmin.bind(this); + } + + storeCode(request: MsgStoreCode): Promise { + const data = MsgStoreCode.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "StoreCode", data); + return promise.then(data => MsgStoreCodeResponse.decode(new _m0.Reader(data))); + } + + instantiateContract(request: MsgInstantiateContract): Promise { + const data = MsgInstantiateContract.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "InstantiateContract", data); + return promise.then(data => MsgInstantiateContractResponse.decode(new _m0.Reader(data))); + } + + instantiateContract2(request: MsgInstantiateContract2): Promise { + const data = MsgInstantiateContract2.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "InstantiateContract2", data); + return promise.then(data => MsgInstantiateContract2Response.decode(new _m0.Reader(data))); + } + + executeContract(request: MsgExecuteContract): Promise { + const data = MsgExecuteContract.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "ExecuteContract", data); + return promise.then(data => MsgExecuteContractResponse.decode(new _m0.Reader(data))); + } + + migrateContract(request: MsgMigrateContract): Promise { + const data = MsgMigrateContract.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "MigrateContract", data); + return promise.then(data => MsgMigrateContractResponse.decode(new _m0.Reader(data))); + } + + updateAdmin(request: MsgUpdateAdmin): Promise { + const data = MsgUpdateAdmin.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "UpdateAdmin", data); + return promise.then(data => MsgUpdateAdminResponse.decode(new _m0.Reader(data))); + } + + clearAdmin(request: MsgClearAdmin): Promise { + const data = MsgClearAdmin.encode(request).finish(); + const promise = this.rpc.request("cosmwasm.wasm.v1.Msg", "ClearAdmin", data); + return promise.then(data => MsgClearAdminResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/tx.ts b/packages/codegen/src/cosmwasm/wasm/v1/tx.ts new file mode 100644 index 00000000..6c10e880 --- /dev/null +++ 
b/packages/codegen/src/cosmwasm/wasm/v1/tx.ts @@ -0,0 +1,1141 @@ +import { AccessConfig, AccessConfigSDKType } from "./types"; +import { Coin, CoinSDKType } from "../../../cosmos/base/v1beta1/coin"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** MsgStoreCode submit Wasm code to the system */ + +export interface MsgStoreCode { + /** Sender is the that actor that signed the messages */ + sender: string; + /** WASMByteCode can be raw or gzip compressed */ + + wasmByteCode: Uint8Array; + /** + * InstantiatePermission access control to apply on contract creation, + * optional + */ + + instantiatePermission?: AccessConfig; +} +/** MsgStoreCode submit Wasm code to the system */ + +export interface MsgStoreCodeSDKType { + sender: string; + wasm_byte_code: Uint8Array; + instantiate_permission?: AccessConfigSDKType; +} +/** MsgStoreCodeResponse returns store result data. */ + +export interface MsgStoreCodeResponse { + /** CodeID is the reference to the stored WASM code */ + codeId: Long; + /** Checksum is the sha256 hash of the stored code */ + + checksum: Uint8Array; +} +/** MsgStoreCodeResponse returns store result data. */ + +export interface MsgStoreCodeResponseSDKType { + code_id: Long; + checksum: Uint8Array; +} +/** + * MsgInstantiateContract create a new smart contract instance for the given + * code id. + */ + +export interface MsgInstantiateContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** CodeID is the reference to the stored WASM code */ + + codeId: Long; + /** Label is optional metadata to be stored with a contract instance. */ + + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; +} +/** + * MsgInstantiateContract create a new smart contract instance for the given + * code id. + */ + +export interface MsgInstantiateContractSDKType { + sender: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** + * MsgInstantiateContract2 create a new smart contract instance for the given + * code id with a predicable address. + */ + +export interface MsgInstantiateContract2 { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** CodeID is the reference to the stored WASM code */ + + codeId: Long; + /** Label is optional metadata to be stored with a contract instance. */ + + label: string; + /** Msg json encoded message to be passed to the contract on instantiation */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on instantiation */ + + funds: Coin[]; + /** Salt is an arbitrary value provided by the sender. Size can be 1 to 64. */ + + salt: Uint8Array; + /** + * FixMsg include the msg value into the hash for the predictable address. + * Default is false + */ + + fixMsg: boolean; +} +/** + * MsgInstantiateContract2 create a new smart contract instance for the given + * code id with a predicable address. 
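+ *
+ * Illustrative sketch: the snake_case fields mirror MsgInstantiateContract2
+ * above; `salt` is an arbitrary 1-64 byte value, and the msg is included in
+ * the predictable-address derivation only when fix_msg is true. All values
+ * below are placeholders.
+ *
+ * @example
+ * const sdkMsg: MsgInstantiateContract2SDKType = {
+ *   sender: senderAddress,
+ *   admin: "",
+ *   code_id: storedCodeId,
+ *   label: "my-contract",
+ *   msg: utf8JsonInitMsg,
+ *   funds: [],
+ *   salt: new Uint8Array([0x01]),
+ *   fix_msg: false
+ * };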
+ */ + +export interface MsgInstantiateContract2SDKType { + sender: string; + admin: string; + code_id: Long; + label: string; + msg: Uint8Array; + funds: CoinSDKType[]; + salt: Uint8Array; + fix_msg: boolean; +} +/** MsgInstantiateContractResponse return instantiation result data */ + +export interface MsgInstantiateContractResponse { + /** Address is the bech32 address of the new contract instance. */ + address: string; + /** Data contains bytes to returned from the contract */ + + data: Uint8Array; +} +/** MsgInstantiateContractResponse return instantiation result data */ + +export interface MsgInstantiateContractResponseSDKType { + address: string; + data: Uint8Array; +} +/** MsgInstantiateContract2Response return instantiation result data */ + +export interface MsgInstantiateContract2Response { + /** Address is the bech32 address of the new contract instance. */ + address: string; + /** Data contains bytes to returned from the contract */ + + data: Uint8Array; +} +/** MsgInstantiateContract2Response return instantiation result data */ + +export interface MsgInstantiateContract2ResponseSDKType { + address: string; + data: Uint8Array; +} +/** MsgExecuteContract submits the given message data to a smart contract */ + +export interface MsgExecuteContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + + contract: string; + /** Msg json encoded message to be passed to the contract */ + + msg: Uint8Array; + /** Funds coins that are transferred to the contract on execution */ + + funds: Coin[]; +} +/** MsgExecuteContract submits the given message data to a smart contract */ + +export interface MsgExecuteContractSDKType { + sender: string; + contract: string; + msg: Uint8Array; + funds: CoinSDKType[]; +} +/** MsgExecuteContractResponse returns execution result data. */ + +export interface MsgExecuteContractResponse { + /** Data contains bytes to returned from the contract */ + data: Uint8Array; +} +/** MsgExecuteContractResponse returns execution result data. */ + +export interface MsgExecuteContractResponseSDKType { + data: Uint8Array; +} +/** MsgMigrateContract runs a code upgrade/ downgrade for a smart contract */ + +export interface MsgMigrateContract { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + + contract: string; + /** CodeID references the new WASM code */ + + codeId: Long; + /** Msg json encoded message to be passed to the contract on migration */ + + msg: Uint8Array; +} +/** MsgMigrateContract runs a code upgrade/ downgrade for a smart contract */ + +export interface MsgMigrateContractSDKType { + sender: string; + contract: string; + code_id: Long; + msg: Uint8Array; +} +/** MsgMigrateContractResponse returns contract migration result data. */ + +export interface MsgMigrateContractResponse { + /** + * Data contains same raw bytes returned as data from the wasm contract. + * (May be empty) + */ + data: Uint8Array; +} +/** MsgMigrateContractResponse returns contract migration result data. 
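+ *
+ * Illustrative sketch: `rawBytes` is a placeholder Uint8Array taken from a tx
+ * result; `decode` is defined on MsgMigrateContractResponse further below in
+ * this file.
+ *
+ * @example
+ * const res = MsgMigrateContractResponse.decode(rawBytes);
+ * console.log(res.data.length); // data may be empty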
*/ + +export interface MsgMigrateContractResponseSDKType { + data: Uint8Array; +} +/** MsgUpdateAdmin sets a new admin for a smart contract */ + +export interface MsgUpdateAdmin { + /** Sender is the that actor that signed the messages */ + sender: string; + /** NewAdmin address to be set */ + + newAdmin: string; + /** Contract is the address of the smart contract */ + + contract: string; +} +/** MsgUpdateAdmin sets a new admin for a smart contract */ + +export interface MsgUpdateAdminSDKType { + sender: string; + new_admin: string; + contract: string; +} +/** MsgUpdateAdminResponse returns empty data */ + +export interface MsgUpdateAdminResponse {} +/** MsgUpdateAdminResponse returns empty data */ + +export interface MsgUpdateAdminResponseSDKType {} +/** MsgClearAdmin removes any admin stored for a smart contract */ + +export interface MsgClearAdmin { + /** Sender is the that actor that signed the messages */ + sender: string; + /** Contract is the address of the smart contract */ + + contract: string; +} +/** MsgClearAdmin removes any admin stored for a smart contract */ + +export interface MsgClearAdminSDKType { + sender: string; + contract: string; +} +/** MsgClearAdminResponse returns empty data */ + +export interface MsgClearAdminResponse {} +/** MsgClearAdminResponse returns empty data */ + +export interface MsgClearAdminResponseSDKType {} + +function createBaseMsgStoreCode(): MsgStoreCode { + return { + sender: "", + wasmByteCode: new Uint8Array(), + instantiatePermission: undefined + }; +} + +export const MsgStoreCode = { + encode(message: MsgStoreCode, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.wasmByteCode.length !== 0) { + writer.uint32(18).bytes(message.wasmByteCode); + } + + if (message.instantiatePermission !== undefined) { + AccessConfig.encode(message.instantiatePermission, writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgStoreCode { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgStoreCode(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.wasmByteCode = reader.bytes(); + break; + + case 5: + message.instantiatePermission = AccessConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgStoreCode { + const message = createBaseMsgStoreCode(); + message.sender = object.sender ?? ""; + message.wasmByteCode = object.wasmByteCode ?? new Uint8Array(); + message.instantiatePermission = object.instantiatePermission !== undefined && object.instantiatePermission !== null ? 
AccessConfig.fromPartial(object.instantiatePermission) : undefined; + return message; + } + +}; + +function createBaseMsgStoreCodeResponse(): MsgStoreCodeResponse { + return { + codeId: Long.UZERO, + checksum: new Uint8Array() + }; +} + +export const MsgStoreCodeResponse = { + encode(message: MsgStoreCodeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.checksum.length !== 0) { + writer.uint32(18).bytes(message.checksum); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgStoreCodeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgStoreCodeResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.checksum = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgStoreCodeResponse { + const message = createBaseMsgStoreCodeResponse(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.checksum = object.checksum ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgInstantiateContract(): MsgInstantiateContract { + return { + sender: "", + admin: "", + codeId: Long.UZERO, + label: "", + msg: new Uint8Array(), + funds: [] + }; +} + +export const MsgInstantiateContract = { + encode(message: MsgInstantiateContract, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.admin !== "") { + writer.uint32(18).string(message.admin); + } + + if (!message.codeId.isZero()) { + writer.uint32(24).uint64(message.codeId); + } + + if (message.label !== "") { + writer.uint32(34).string(message.label); + } + + if (message.msg.length !== 0) { + writer.uint32(42).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgInstantiateContract(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.admin = reader.string(); + break; + + case 3: + message.codeId = (reader.uint64() as Long); + break; + + case 4: + message.label = reader.string(); + break; + + case 5: + message.msg = reader.bytes(); + break; + + case 6: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgInstantiateContract { + const message = createBaseMsgInstantiateContract(); + message.sender = object.sender ?? ""; + message.admin = object.admin ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.label = object.label ?? ""; + message.msg = object.msg ?? 
new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgInstantiateContract2(): MsgInstantiateContract2 { + return { + sender: "", + admin: "", + codeId: Long.UZERO, + label: "", + msg: new Uint8Array(), + funds: [], + salt: new Uint8Array(), + fixMsg: false + }; +} + +export const MsgInstantiateContract2 = { + encode(message: MsgInstantiateContract2, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.admin !== "") { + writer.uint32(18).string(message.admin); + } + + if (!message.codeId.isZero()) { + writer.uint32(24).uint64(message.codeId); + } + + if (message.label !== "") { + writer.uint32(34).string(message.label); + } + + if (message.msg.length !== 0) { + writer.uint32(42).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + if (message.salt.length !== 0) { + writer.uint32(58).bytes(message.salt); + } + + if (message.fixMsg === true) { + writer.uint32(64).bool(message.fixMsg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract2 { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgInstantiateContract2(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.admin = reader.string(); + break; + + case 3: + message.codeId = (reader.uint64() as Long); + break; + + case 4: + message.label = reader.string(); + break; + + case 5: + message.msg = reader.bytes(); + break; + + case 6: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + case 7: + message.salt = reader.bytes(); + break; + + case 8: + message.fixMsg = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgInstantiateContract2 { + const message = createBaseMsgInstantiateContract2(); + message.sender = object.sender ?? ""; + message.admin = object.admin ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.label = object.label ?? ""; + message.msg = object.msg ?? new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + message.salt = object.salt ?? new Uint8Array(); + message.fixMsg = object.fixMsg ?? false; + return message; + } + +}; + +function createBaseMsgInstantiateContractResponse(): MsgInstantiateContractResponse { + return { + address: "", + data: new Uint8Array() + }; +} + +export const MsgInstantiateContractResponse = { + encode(message: MsgInstantiateContractResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContractResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgInstantiateContractResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgInstantiateContractResponse { + const message = createBaseMsgInstantiateContractResponse(); + message.address = object.address ?? ""; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgInstantiateContract2Response(): MsgInstantiateContract2Response { + return { + address: "", + data: new Uint8Array() + }; +} + +export const MsgInstantiateContract2Response = { + encode(message: MsgInstantiateContract2Response, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstantiateContract2Response { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgInstantiateContract2Response(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgInstantiateContract2Response { + const message = createBaseMsgInstantiateContract2Response(); + message.address = object.address ?? ""; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgExecuteContract(): MsgExecuteContract { + return { + sender: "", + contract: "", + msg: new Uint8Array(), + funds: [] + }; +} + +export const MsgExecuteContract = { + encode(message: MsgExecuteContract, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.contract !== "") { + writer.uint32(18).string(message.contract); + } + + if (message.msg.length !== 0) { + writer.uint32(26).bytes(message.msg); + } + + for (const v of message.funds) { + Coin.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecuteContract { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExecuteContract(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.contract = reader.string(); + break; + + case 3: + message.msg = reader.bytes(); + break; + + case 5: + message.funds.push(Coin.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExecuteContract { + const message = createBaseMsgExecuteContract(); + message.sender = object.sender ?? ""; + message.contract = object.contract ?? ""; + message.msg = object.msg ?? 
new Uint8Array(); + message.funds = object.funds?.map(e => Coin.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMsgExecuteContractResponse(): MsgExecuteContractResponse { + return { + data: new Uint8Array() + }; +} + +export const MsgExecuteContractResponse = { + encode(message: MsgExecuteContractResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgExecuteContractResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgExecuteContractResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgExecuteContractResponse { + const message = createBaseMsgExecuteContractResponse(); + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgMigrateContract(): MsgMigrateContract { + return { + sender: "", + contract: "", + codeId: Long.UZERO, + msg: new Uint8Array() + }; +} + +export const MsgMigrateContract = { + encode(message: MsgMigrateContract, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.contract !== "") { + writer.uint32(18).string(message.contract); + } + + if (!message.codeId.isZero()) { + writer.uint32(24).uint64(message.codeId); + } + + if (message.msg.length !== 0) { + writer.uint32(34).bytes(message.msg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMigrateContract { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgMigrateContract(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.contract = reader.string(); + break; + + case 3: + message.codeId = (reader.uint64() as Long); + break; + + case 4: + message.msg = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgMigrateContract { + const message = createBaseMsgMigrateContract(); + message.sender = object.sender ?? ""; + message.contract = object.contract ?? ""; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.msg = object.msg ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgMigrateContractResponse(): MsgMigrateContractResponse { + return { + data: new Uint8Array() + }; +} + +export const MsgMigrateContractResponse = { + encode(message: MsgMigrateContractResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgMigrateContractResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgMigrateContractResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgMigrateContractResponse { + const message = createBaseMsgMigrateContractResponse(); + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMsgUpdateAdmin(): MsgUpdateAdmin { + return { + sender: "", + newAdmin: "", + contract: "" + }; +} + +export const MsgUpdateAdmin = { + encode(message: MsgUpdateAdmin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.newAdmin !== "") { + writer.uint32(18).string(message.newAdmin); + } + + if (message.contract !== "") { + writer.uint32(26).string(message.contract); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAdmin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateAdmin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 2: + message.newAdmin = reader.string(); + break; + + case 3: + message.contract = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateAdmin { + const message = createBaseMsgUpdateAdmin(); + message.sender = object.sender ?? ""; + message.newAdmin = object.newAdmin ?? ""; + message.contract = object.contract ?? ""; + return message; + } + +}; + +function createBaseMsgUpdateAdminResponse(): MsgUpdateAdminResponse { + return {}; +} + +export const MsgUpdateAdminResponse = { + encode(_: MsgUpdateAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateAdminResponse { + const message = createBaseMsgUpdateAdminResponse(); + return message; + } + +}; + +function createBaseMsgClearAdmin(): MsgClearAdmin { + return { + sender: "", + contract: "" + }; +} + +export const MsgClearAdmin = { + encode(message: MsgClearAdmin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sender !== "") { + writer.uint32(10).string(message.sender); + } + + if (message.contract !== "") { + writer.uint32(26).string(message.contract); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgClearAdmin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgClearAdmin(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sender = reader.string(); + break; + + case 3: + message.contract = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgClearAdmin { + const message = createBaseMsgClearAdmin(); + message.sender = object.sender ?? ""; + message.contract = object.contract ?? ""; + return message; + } + +}; + +function createBaseMsgClearAdminResponse(): MsgClearAdminResponse { + return {}; +} + +export const MsgClearAdminResponse = { + encode(_: MsgClearAdminResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgClearAdminResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgClearAdminResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgClearAdminResponse { + const message = createBaseMsgClearAdminResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/cosmwasm/wasm/v1/types.ts b/packages/codegen/src/cosmwasm/wasm/v1/types.ts new file mode 100644 index 00000000..5fd2ba22 --- /dev/null +++ b/packages/codegen/src/cosmwasm/wasm/v1/types.ts @@ -0,0 +1,811 @@ +import { Any, AnySDKType } from "../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../helpers"; +/** AccessType permission types */ + +export enum AccessType { + /** ACCESS_TYPE_UNSPECIFIED - AccessTypeUnspecified placeholder for empty value */ + ACCESS_TYPE_UNSPECIFIED = 0, + + /** ACCESS_TYPE_NOBODY - AccessTypeNobody forbidden */ + ACCESS_TYPE_NOBODY = 1, + + /** + * ACCESS_TYPE_ONLY_ADDRESS - AccessTypeOnlyAddress restricted to a single address + * Deprecated: use AccessTypeAnyOfAddresses instead + */ + ACCESS_TYPE_ONLY_ADDRESS = 2, + + /** ACCESS_TYPE_EVERYBODY - AccessTypeEverybody unrestricted */ + ACCESS_TYPE_EVERYBODY = 3, + + /** ACCESS_TYPE_ANY_OF_ADDRESSES - AccessTypeAnyOfAddresses allow any of the addresses */ + ACCESS_TYPE_ANY_OF_ADDRESSES = 4, + UNRECOGNIZED = -1, +} +export const AccessTypeSDKType = AccessType; +export function accessTypeFromJSON(object: any): AccessType { + switch (object) { + case 0: + case "ACCESS_TYPE_UNSPECIFIED": + return AccessType.ACCESS_TYPE_UNSPECIFIED; + + case 1: + case "ACCESS_TYPE_NOBODY": + return AccessType.ACCESS_TYPE_NOBODY; + + case 2: + case "ACCESS_TYPE_ONLY_ADDRESS": + return AccessType.ACCESS_TYPE_ONLY_ADDRESS; + + case 3: + case "ACCESS_TYPE_EVERYBODY": + return AccessType.ACCESS_TYPE_EVERYBODY; + + case 4: + case "ACCESS_TYPE_ANY_OF_ADDRESSES": + return AccessType.ACCESS_TYPE_ANY_OF_ADDRESSES; + + case -1: + case "UNRECOGNIZED": + default: + return AccessType.UNRECOGNIZED; + } +} +export function accessTypeToJSON(object: AccessType): string { + switch (object) { + case AccessType.ACCESS_TYPE_UNSPECIFIED: + return "ACCESS_TYPE_UNSPECIFIED"; + + case AccessType.ACCESS_TYPE_NOBODY: + return "ACCESS_TYPE_NOBODY"; + + case AccessType.ACCESS_TYPE_ONLY_ADDRESS: + return "ACCESS_TYPE_ONLY_ADDRESS"; + + case 
AccessType.ACCESS_TYPE_EVERYBODY: + return "ACCESS_TYPE_EVERYBODY"; + + case AccessType.ACCESS_TYPE_ANY_OF_ADDRESSES: + return "ACCESS_TYPE_ANY_OF_ADDRESSES"; + + case AccessType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** ContractCodeHistoryOperationType actions that caused a code change */ + +export enum ContractCodeHistoryOperationType { + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED - ContractCodeHistoryOperationTypeUnspecified placeholder for empty value */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED = 0, + + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT - ContractCodeHistoryOperationTypeInit on chain contract instantiation */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT = 1, + + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE - ContractCodeHistoryOperationTypeMigrate code migration */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE = 2, + + /** CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS - ContractCodeHistoryOperationTypeGenesis based on genesis data */ + CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS = 3, + UNRECOGNIZED = -1, +} +export const ContractCodeHistoryOperationTypeSDKType = ContractCodeHistoryOperationType; +export function contractCodeHistoryOperationTypeFromJSON(object: any): ContractCodeHistoryOperationType { + switch (object) { + case 0: + case "CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED": + return ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED; + + case 1: + case "CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT": + return ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT; + + case 2: + case "CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE": + return ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE; + + case 3: + case "CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS": + return ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS; + + case -1: + case "UNRECOGNIZED": + default: + return ContractCodeHistoryOperationType.UNRECOGNIZED; + } +} +export function contractCodeHistoryOperationTypeToJSON(object: ContractCodeHistoryOperationType): string { + switch (object) { + case ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED: + return "CONTRACT_CODE_HISTORY_OPERATION_TYPE_UNSPECIFIED"; + + case ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT: + return "CONTRACT_CODE_HISTORY_OPERATION_TYPE_INIT"; + + case ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE: + return "CONTRACT_CODE_HISTORY_OPERATION_TYPE_MIGRATE"; + + case ContractCodeHistoryOperationType.CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS: + return "CONTRACT_CODE_HISTORY_OPERATION_TYPE_GENESIS"; + + case ContractCodeHistoryOperationType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** AccessTypeParam */ + +export interface AccessTypeParam { + value: AccessType; +} +/** AccessTypeParam */ + +export interface AccessTypeParamSDKType { + value: AccessType; +} +/** AccessConfig access control type. */ + +export interface AccessConfig { + permission: AccessType; + /** + * Address + * Deprecated: replaced by addresses + */ + + address: string; + addresses: string[]; +} +/** AccessConfig access control type. */ + +export interface AccessConfigSDKType { + permission: AccessType; + address: string; + addresses: string[]; +} +/** Params defines the set of wasm parameters. 
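+ *
+ * Illustrative sketch (values are placeholders, not the chain's actual
+ * defaults); AccessType and AccessConfig are declared above in this file.
+ *
+ * @example
+ * const params: Params = {
+ *   codeUploadAccess: {
+ *     permission: AccessType.ACCESS_TYPE_EVERYBODY,
+ *     address: "",
+ *     addresses: []
+ *   },
+ *   instantiateDefaultPermission: AccessType.ACCESS_TYPE_EVERYBODY
+ * };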
*/ + +export interface Params { + codeUploadAccess?: AccessConfig; + instantiateDefaultPermission: AccessType; +} +/** Params defines the set of wasm parameters. */ + +export interface ParamsSDKType { + code_upload_access?: AccessConfigSDKType; + instantiate_default_permission: AccessType; +} +/** CodeInfo is data for the uploaded contract WASM code */ + +export interface CodeInfo { + /** CodeHash is the unique identifier created by wasmvm */ + codeHash: Uint8Array; + /** Creator address who initially stored the code */ + + creator: string; + /** InstantiateConfig access control to apply on contract creation, optional */ + + instantiateConfig?: AccessConfig; +} +/** CodeInfo is data for the uploaded contract WASM code */ + +export interface CodeInfoSDKType { + code_hash: Uint8Array; + creator: string; + instantiate_config?: AccessConfigSDKType; +} +/** ContractInfo stores a WASM contract instance */ + +export interface ContractInfo { + /** CodeID is the reference to the stored Wasm code */ + codeId: Long; + /** Creator address who initially instantiated the contract */ + + creator: string; + /** Admin is an optional address that can execute migrations */ + + admin: string; + /** Label is optional metadata to be stored with a contract instance. */ + + label: string; + /** Created Tx position when the contract was instantiated. */ + + created?: AbsoluteTxPosition; + ibcPortId: string; + /** + * Extension is an extension point to store custom metadata within the + * persistence model. + */ + + extension?: Any; +} +/** ContractInfo stores a WASM contract instance */ + +export interface ContractInfoSDKType { + code_id: Long; + creator: string; + admin: string; + label: string; + created?: AbsoluteTxPositionSDKType; + ibc_port_id: string; + extension?: AnySDKType; +} +/** ContractCodeHistoryEntry metadata to a contract. */ + +export interface ContractCodeHistoryEntry { + operation: ContractCodeHistoryOperationType; + /** CodeID is the reference to the stored WASM code */ + + codeId: Long; + /** Updated Tx position when the operation was executed. */ + + updated?: AbsoluteTxPosition; + msg: Uint8Array; +} +/** ContractCodeHistoryEntry metadata to a contract. */ + +export interface ContractCodeHistoryEntrySDKType { + operation: ContractCodeHistoryOperationType; + code_id: Long; + updated?: AbsoluteTxPositionSDKType; + msg: Uint8Array; +} +/** + * AbsoluteTxPosition is a unique transaction position that allows for global + * ordering of transactions. + */ + +export interface AbsoluteTxPosition { + /** BlockHeight is the block the contract was created at */ + blockHeight: Long; + /** + * TxIndex is a monotonic counter within the block (actual transaction index, + * or gas consumed) + */ + + txIndex: Long; +} +/** + * AbsoluteTxPosition is a unique transaction position that allows for global + * ordering of transactions. 
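+ *
+ * Illustrative sketch: Long is imported from ../../../helpers in this file;
+ * the height and index below are placeholders.
+ *
+ * @example
+ * const pos: AbsoluteTxPositionSDKType = {
+ *   block_height: Long.fromNumber(1234567, true),
+ *   tx_index: Long.fromNumber(0, true)
+ * };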
+ */ + +export interface AbsoluteTxPositionSDKType { + block_height: Long; + tx_index: Long; +} +/** Model is a struct that holds a KV pair */ + +export interface Model { + /** hex-encode key to read it better (this is often ascii) */ + key: Uint8Array; + /** base64-encode raw value */ + + value: Uint8Array; +} +/** Model is a struct that holds a KV pair */ + +export interface ModelSDKType { + key: Uint8Array; + value: Uint8Array; +} + +function createBaseAccessTypeParam(): AccessTypeParam { + return { + value: 0 + }; +} + +export const AccessTypeParam = { + encode(message: AccessTypeParam, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.value !== 0) { + writer.uint32(8).int32(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccessTypeParam { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccessTypeParam(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.value = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccessTypeParam { + const message = createBaseAccessTypeParam(); + message.value = object.value ?? 0; + return message; + } + +}; + +function createBaseAccessConfig(): AccessConfig { + return { + permission: 0, + address: "", + addresses: [] + }; +} + +export const AccessConfig = { + encode(message: AccessConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.permission !== 0) { + writer.uint32(8).int32(message.permission); + } + + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + + for (const v of message.addresses) { + writer.uint32(26).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccessConfig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccessConfig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.permission = (reader.int32() as any); + break; + + case 2: + message.address = reader.string(); + break; + + case 3: + message.addresses.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AccessConfig { + const message = createBaseAccessConfig(); + message.permission = object.permission ?? 0; + message.address = object.address ?? ""; + message.addresses = object.addresses?.map(e => e) || []; + return message; + } + +}; + +function createBaseParams(): Params { + return { + codeUploadAccess: undefined, + instantiateDefaultPermission: 0 + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.codeUploadAccess !== undefined) { + AccessConfig.encode(message.codeUploadAccess, writer.uint32(10).fork()).ldelim(); + } + + if (message.instantiateDefaultPermission !== 0) { + writer.uint32(16).int32(message.instantiateDefaultPermission); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeUploadAccess = AccessConfig.decode(reader, reader.uint32()); + break; + + case 2: + message.instantiateDefaultPermission = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.codeUploadAccess = object.codeUploadAccess !== undefined && object.codeUploadAccess !== null ? AccessConfig.fromPartial(object.codeUploadAccess) : undefined; + message.instantiateDefaultPermission = object.instantiateDefaultPermission ?? 0; + return message; + } + +}; + +function createBaseCodeInfo(): CodeInfo { + return { + codeHash: new Uint8Array(), + creator: "", + instantiateConfig: undefined + }; +} + +export const CodeInfo = { + encode(message: CodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.codeHash.length !== 0) { + writer.uint32(10).bytes(message.codeHash); + } + + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + + if (message.instantiateConfig !== undefined) { + AccessConfig.encode(message.instantiateConfig, writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCodeInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeHash = reader.bytes(); + break; + + case 2: + message.creator = reader.string(); + break; + + case 5: + message.instantiateConfig = AccessConfig.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CodeInfo { + const message = createBaseCodeInfo(); + message.codeHash = object.codeHash ?? new Uint8Array(); + message.creator = object.creator ?? ""; + message.instantiateConfig = object.instantiateConfig !== undefined && object.instantiateConfig !== null ? AccessConfig.fromPartial(object.instantiateConfig) : undefined; + return message; + } + +}; + +function createBaseContractInfo(): ContractInfo { + return { + codeId: Long.UZERO, + creator: "", + admin: "", + label: "", + created: undefined, + ibcPortId: "", + extension: undefined + }; +} + +export const ContractInfo = { + encode(message: ContractInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.codeId.isZero()) { + writer.uint32(8).uint64(message.codeId); + } + + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + + if (message.admin !== "") { + writer.uint32(26).string(message.admin); + } + + if (message.label !== "") { + writer.uint32(34).string(message.label); + } + + if (message.created !== undefined) { + AbsoluteTxPosition.encode(message.created, writer.uint32(42).fork()).ldelim(); + } + + if (message.ibcPortId !== "") { + writer.uint32(50).string(message.ibcPortId); + } + + if (message.extension !== undefined) { + Any.encode(message.extension, writer.uint32(58).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContractInfo { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContractInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.codeId = (reader.uint64() as Long); + break; + + case 2: + message.creator = reader.string(); + break; + + case 3: + message.admin = reader.string(); + break; + + case 4: + message.label = reader.string(); + break; + + case 5: + message.created = AbsoluteTxPosition.decode(reader, reader.uint32()); + break; + + case 6: + message.ibcPortId = reader.string(); + break; + + case 7: + message.extension = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContractInfo { + const message = createBaseContractInfo(); + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.creator = object.creator ?? ""; + message.admin = object.admin ?? ""; + message.label = object.label ?? ""; + message.created = object.created !== undefined && object.created !== null ? AbsoluteTxPosition.fromPartial(object.created) : undefined; + message.ibcPortId = object.ibcPortId ?? ""; + message.extension = object.extension !== undefined && object.extension !== null ? Any.fromPartial(object.extension) : undefined; + return message; + } + +}; + +function createBaseContractCodeHistoryEntry(): ContractCodeHistoryEntry { + return { + operation: 0, + codeId: Long.UZERO, + updated: undefined, + msg: new Uint8Array() + }; +} + +export const ContractCodeHistoryEntry = { + encode(message: ContractCodeHistoryEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.operation !== 0) { + writer.uint32(8).int32(message.operation); + } + + if (!message.codeId.isZero()) { + writer.uint32(16).uint64(message.codeId); + } + + if (message.updated !== undefined) { + AbsoluteTxPosition.encode(message.updated, writer.uint32(26).fork()).ldelim(); + } + + if (message.msg.length !== 0) { + writer.uint32(34).bytes(message.msg); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContractCodeHistoryEntry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContractCodeHistoryEntry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.operation = (reader.int32() as any); + break; + + case 2: + message.codeId = (reader.uint64() as Long); + break; + + case 3: + message.updated = AbsoluteTxPosition.decode(reader, reader.uint32()); + break; + + case 4: + message.msg = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ContractCodeHistoryEntry { + const message = createBaseContractCodeHistoryEntry(); + message.operation = object.operation ?? 0; + message.codeId = object.codeId !== undefined && object.codeId !== null ? Long.fromValue(object.codeId) : Long.UZERO; + message.updated = object.updated !== undefined && object.updated !== null ? AbsoluteTxPosition.fromPartial(object.updated) : undefined; + message.msg = object.msg ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseAbsoluteTxPosition(): AbsoluteTxPosition { + return { + blockHeight: Long.UZERO, + txIndex: Long.UZERO + }; +} + +export const AbsoluteTxPosition = { + encode(message: AbsoluteTxPosition, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.blockHeight.isZero()) { + writer.uint32(8).uint64(message.blockHeight); + } + + if (!message.txIndex.isZero()) { + writer.uint32(16).uint64(message.txIndex); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AbsoluteTxPosition { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAbsoluteTxPosition(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockHeight = (reader.uint64() as Long); + break; + + case 2: + message.txIndex = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): AbsoluteTxPosition { + const message = createBaseAbsoluteTxPosition(); + message.blockHeight = object.blockHeight !== undefined && object.blockHeight !== null ? Long.fromValue(object.blockHeight) : Long.UZERO; + message.txIndex = object.txIndex !== undefined && object.txIndex !== null ? Long.fromValue(object.txIndex) : Long.UZERO; + return message; + } + +}; + +function createBaseModel(): Model { + return { + key: new Uint8Array(), + value: new Uint8Array() + }; +} + +export const Model = { + encode(message: Model, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Model { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModel(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Model { + const message = createBaseModel(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/gogoproto/bundle.ts b/packages/codegen/src/gogoproto/bundle.ts new file mode 100644 index 00000000..376d487c --- /dev/null +++ b/packages/codegen/src/gogoproto/bundle.ts @@ -0,0 +1,3 @@ +import * as _102 from "./gogo"; +export const gogoproto = { ..._102 +}; \ No newline at end of file diff --git a/packages/codegen/src/gogoproto/gogo.ts b/packages/codegen/src/gogoproto/gogo.ts new file mode 100644 index 00000000..693da49f --- /dev/null +++ b/packages/codegen/src/gogoproto/gogo.ts @@ -0,0 +1 @@ +export {} \ No newline at end of file diff --git a/packages/codegen/src/google/api/annotations.ts b/packages/codegen/src/google/api/annotations.ts new file mode 100644 index 00000000..693da49f --- /dev/null +++ b/packages/codegen/src/google/api/annotations.ts @@ -0,0 +1 @@ +export {} \ No newline at end of file diff --git a/packages/codegen/src/google/api/http.ts b/packages/codegen/src/google/api/http.ts new file mode 100644 index 00000000..08e2e293 --- /dev/null +++ b/packages/codegen/src/google/api/http.ts @@ -0,0 +1,913 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ + +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + + fullyDecodeReservedExpansion: boolean; +} +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ + +export interface HttpSDKType { + rules: HttpRuleSDKType[]; + fully_decode_reserved_expansion: boolean; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. 
+ * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" 
}` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. 
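+ *
+ * For example, applying these rules to the templates used earlier in this
+ * comment: in `/v1/messages/{message_id}` the variable `message_id` matches a
+ * single segment, so it expands with the percent-encoding described above and
+ * appears as `{message_id}` in a Discovery Document, whereas in
+ * `/v1/{name=messages/*}` the variable `name` spans two segments (`messages`
+ * plus the id) and therefore follows the multi-segment rules described next.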
+ * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ + +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + + get?: string; + /** Maps to HTTP PUT. Used for replacing a resource. */ + + put?: string; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + + post?: string; + /** Maps to HTTP DELETE. Used for deleting a resource. 
*/ + + delete?: string; + /** Maps to HTTP PATCH. Used for updating a resource. */ + + patch?: string; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + + custom?: CustomHttpPattern; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + + additionalBindings: HttpRule[]; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. 
+ * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. 
The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ + +export interface HttpRuleSDKType { + selector: string; + get?: string; + put?: string; + post?: string; + delete?: string; + patch?: string; + custom?: CustomHttpPatternSDKType; + body: string; + response_body: string; + additional_bindings: HttpRuleSDKType[]; +} +/** A custom pattern is used for defining custom HTTP verb. */ + +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + + path: string; +} +/** A custom pattern is used for defining custom HTTP verb. */ + +export interface CustomHttpPatternSDKType { + kind: string; + path: string; +} + +function createBaseHttp(): Http { + return { + rules: [], + fullyDecodeReservedExpansion: false + }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map(e => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + } + +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [] + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + + case 2: + message.get = reader.string(); + break; + + case 3: + message.put = reader.string(); + break; + + case 4: + message.post = reader.string(); + break; + + case 5: + message.delete = reader.string(); + break; + + case 6: + message.patch = reader.string(); + break; + + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + + case 7: + message.body = reader.string(); + break; + + case 12: + message.responseBody = reader.string(); + break; + + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = object.custom !== undefined && object.custom !== null ? 
CustomHttpPattern.fromPartial(object.custom) : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? ""; + message.additionalBindings = object.additionalBindings?.map(e => HttpRule.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { + kind: "", + path: "" + }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + + case 2: + message.path = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/google/bundle.ts b/packages/codegen/src/google/bundle.ts new file mode 100644 index 00000000..f15a5325 --- /dev/null +++ b/packages/codegen/src/google/bundle.ts @@ -0,0 +1,18 @@ +import * as _103 from "./api/annotations"; +import * as _104 from "./api/http"; +import * as _105 from "./protobuf/any"; +import * as _106 from "./protobuf/descriptor"; +import * as _107 from "./protobuf/duration"; +import * as _108 from "./protobuf/empty"; +import * as _109 from "./protobuf/timestamp"; +export namespace google { + export const api = { ..._103, + ..._104 + }; + export const protobuf = { ..._105, + ..._106, + ..._107, + ..._108, + ..._109 + }; +} \ No newline at end of file diff --git a/packages/codegen/src/google/protobuf/any.ts b/packages/codegen/src/google/protobuf/any.ts new file mode 100644 index 00000000..216c3e1b --- /dev/null +++ b/packages/codegen/src/google/protobuf/any.ts @@ -0,0 +1,260 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ + +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + + value: Uint8Array; +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... 
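+ *
+ * A corresponding sketch in TypeScript, using the `Any` helpers generated in
+ * this file (`Person` is a stand-in for any message type generated alongside
+ * it; illustrative only):
+ *
+ *     const any = Any.fromPartial({
+ *       typeUrl: "type.googleapis.com/google.profile.Person",
+ *       value: Person.encode(person).finish(),
+ *     });
+ *     ...
+ *     if (any.typeUrl === "type.googleapis.com/google.profile.Person") {
+ *       person = Person.decode(any.value);
+ *     }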
+ * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ + +export interface AnySDKType { + type_url: string; + value: Uint8Array; +} + +function createBaseAny(): Any { + return { + typeUrl: "", + value: new Uint8Array() + }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + + case 2: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/google/protobuf/descriptor.ts b/packages/codegen/src/google/protobuf/descriptor.ts new file mode 100644 index 00000000..7022c025 --- /dev/null +++ b/packages/codegen/src/google/protobuf/descriptor.ts @@ -0,0 +1,3697 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + + /** + * TYPE_GROUP - Tag-delimited aggregate. 
+ * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} +export const FieldDescriptorProto_TypeSDKType = FieldDescriptorProto_Type; +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return 
"TYPE_SFIXED64"; + + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} +export const FieldDescriptorProto_LabelSDKType = FieldDescriptorProto_Label; +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** Generated classes can be optimized for speed or code size. */ + +export enum FileOptions_OptimizeMode { + /** + * SPEED - Generate complete code for parsing, serialization, + * etc. + */ + SPEED = 1, + + /** CODE_SIZE - Use ReflectionOps to implement these methods. */ + CODE_SIZE = 2, + + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} +export const FileOptions_OptimizeModeSDKType = FileOptions_OptimizeMode; +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum FieldOptions_CType { + /** STRING - Default mode. 
*/ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} +export const FieldOptions_CTypeSDKType = FieldOptions_CType; +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + + case 1: + case "CORD": + return FieldOptions_CType.CORD; + + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + + case FieldOptions_CType.CORD: + return "CORD"; + + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} +export const FieldOptions_JSTypeSDKType = FieldOptions_JSType; +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
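+ *
+ * A minimal usage sketch of the generated helpers defined below (not part of
+ * the upstream descriptor.proto comment):
+ *
+ *     methodOptions_IdempotencyLevelFromJSON("NO_SIDE_EFFECTS")
+ *       // => MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS (1)
+ *     methodOptions_IdempotencyLevelToJSON(MethodOptions_IdempotencyLevel.IDEMPOTENT)
+ *       // => "IDEMPOTENT"
+ *
+ * Any other input falls back to UNRECOGNIZED (-1) / "UNRECOGNIZED".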
+ */ + +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} +export const MethodOptions_IdempotencyLevelSDKType = MethodOptions_IdempotencyLevel; +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ + +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ + +export interface FileDescriptorSetSDKType { + file: FileDescriptorProtoSDKType[]; +} +/** Describes a complete .proto file. */ + +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + package: string; + /** Names of files imported by this file. */ + + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + + weakDependency: number[]; + /** All top-level definitions in this file. */ + + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: FileOptions; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + + sourceCodeInfo?: SourceCodeInfo; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + + syntax: string; +} +/** Describes a complete .proto file. */ + +export interface FileDescriptorProtoSDKType { + name: string; + package: string; + dependency: string[]; + public_dependency: number[]; + weak_dependency: number[]; + message_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + service: ServiceDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + options?: FileOptionsSDKType; + source_code_info?: SourceCodeInfoSDKType; + syntax: string; +} +/** Describes a message type. 
*/ + +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + + reservedName: string[]; +} +/** Describes a message type. */ + +export interface DescriptorProtoSDKType { + name: string; + field: FieldDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + nested_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + extension_range: DescriptorProto_ExtensionRangeSDKType[]; + oneof_decl: OneofDescriptorProtoSDKType[]; + options?: MessageOptionsSDKType; + reserved_range: DescriptorProto_ReservedRangeSDKType[]; + reserved_name: string[]; +} +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + + end: number; + options?: ExtensionRangeOptions; +} +export interface DescriptorProto_ExtensionRangeSDKType { + start: number; + end: number; + options?: ExtensionRangeOptionsSDKType; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ + +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + + end: number; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ + +export interface DescriptorProto_ReservedRangeSDKType { + start: number; + end: number; +} +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface ExtensionRangeOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** Describes a field within a message. */ + +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + + oneofIndex: number; + /** + * JSON name of this field. 
The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + + jsonName: string; + options?: FieldOptions; +} +/** Describes a field within a message. */ + +export interface FieldDescriptorProtoSDKType { + name: string; + number: number; + label: FieldDescriptorProto_Label; + type: FieldDescriptorProto_Type; + type_name: string; + extendee: string; + default_value: string; + oneof_index: number; + json_name: string; + options?: FieldOptionsSDKType; +} +/** Describes a oneof. */ + +export interface OneofDescriptorProto { + name: string; + options?: OneofOptions; +} +/** Describes a oneof. */ + +export interface OneofDescriptorProtoSDKType { + name: string; + options?: OneofOptionsSDKType; +} +/** Describes an enum type. */ + +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options?: EnumOptions; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + + reservedName: string[]; +} +/** Describes an enum type. */ + +export interface EnumDescriptorProtoSDKType { + name: string; + value: EnumValueDescriptorProtoSDKType[]; + options?: EnumOptionsSDKType; + reserved_range: EnumDescriptorProto_EnumReservedRangeSDKType[]; + reserved_name: string[]; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ + +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + + end: number; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ + +export interface EnumDescriptorProto_EnumReservedRangeSDKType { + start: number; + end: number; +} +/** Describes a value within an enum. */ + +export interface EnumValueDescriptorProto { + name: string; + number: number; + options?: EnumValueOptions; +} +/** Describes a value within an enum. */ + +export interface EnumValueDescriptorProtoSDKType { + name: string; + number: number; + options?: EnumValueOptionsSDKType; +} +/** Describes a service. */ + +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options?: ServiceOptions; +} +/** Describes a service. */ + +export interface ServiceDescriptorProtoSDKType { + name: string; + method: MethodDescriptorProtoSDKType[]; + options?: ServiceOptionsSDKType; +} +/** Describes a method of a service. */ + +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. 
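+   *
+   * For example (hypothetical names), a method declared as
+   *     rpc GetFoo(GetFooRequest) returns (GetFooResponse);
+   * in package example.v1 would typically be resolved to
+   * input_type ".example.v1.GetFooRequest" and
+   * output_type ".example.v1.GetFooResponse".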
+ */ + + inputType: string; + outputType: string; + options?: MethodOptions; + /** Identifies if client streams multiple client messages */ + + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + + serverStreaming: boolean; +} +/** Describes a method of a service. */ + +export interface MethodDescriptorProtoSDKType { + name: string; + input_type: string; + output_type: string; + options?: MethodOptionsSDKType; + client_streaming: boolean; + server_streaming: boolean; +} +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). + */ + + javaOuterClassname: string; + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + + javaMultipleFiles: boolean; + /** This option does nothing. */ + + /** @deprecated */ + + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. */ + + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface FileOptionsSDKType { + java_package: string; + java_outer_classname: string; + java_multiple_files: boolean; + /** @deprecated */ + + java_generate_equals_and_hash: boolean; + java_string_check_utf8: boolean; + optimize_for: FileOptions_OptimizeMode; + go_package: string; + cc_generic_services: boolean; + java_generic_services: boolean; + py_generic_services: boolean; + php_generic_services: boolean; + deprecated: boolean; + cc_enable_arenas: boolean; + objc_class_prefix: string; + csharp_namespace: string; + swift_prefix: string; + php_class_prefix: string; + php_namespace: string; + php_metadata_namespace: string; + ruby_package: string; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. 
+ */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". + */ + + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface MessageOptionsSDKType { + message_set_wire_format: boolean; + no_standard_descriptor_accessor: boolean; + deprecated: boolean; + map_entry: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. 
+ * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface FieldOptionsSDKType { + ctype: FieldOptions_CType; + packed: boolean; + jstype: FieldOptions_JSType; + lazy: boolean; + deprecated: boolean; + weak: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface OneofOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumOptionsSDKType { + allow_alias: boolean; + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumValueOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface ServiceOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + + uninterpretedOption: UninterpretedOption[]; +} +export interface MethodOptionsSDKType { + deprecated: boolean; + idempotency_level: MethodOptions_IdempotencyLevel; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ + +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + + identifierValue: string; + positiveIntValue: Long; + negativeIntValue: Long; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ + +export interface UninterpretedOptionSDKType { + name: UninterpretedOption_NamePartSDKType[]; + identifier_value: string; + positive_int_value: Long; + negative_int_value: Long; + double_value: number; + string_value: Uint8Array; + aggregate_value: string; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ + +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). 
+ * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ + +export interface UninterpretedOption_NamePartSDKType { + name_part: string; + is_extension: boolean; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ + +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ + +export interface SourceCodeInfoSDKType { + location: SourceCodeInfo_LocationSDKType[]; +} +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. 
For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} +export interface SourceCodeInfo_LocationSDKType { + path: number[]; + span: number[]; + leading_comments: string; + trailing_comments: string; + leading_detached_comments: string[]; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ + +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} +/** + * Describes the relationship between generated code and its original source + * file. 
A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ + +export interface GeneratedCodeInfoSDKType { + annotation: GeneratedCodeInfo_AnnotationSDKType[]; +} +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + + end: number; +} +export interface GeneratedCodeInfo_AnnotationSDKType { + path: number[]; + source_file: string; + begin: number; + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { + file: [] + }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map(e => FileDescriptorProto.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "" + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + + writer.uint32(82).fork(); + + for (const v of message.publicDependency) { + writer.int32(v); + } + + writer.ldelim(); + writer.uint32(90).fork(); + + for (const v of message.weakDependency) { + writer.int32(v); + } + + writer.ldelim(); + + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + 
} + + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.package = reader.string(); + break; + + case 3: + message.dependency.push(reader.string()); + break; + + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + + break; + + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + + break; + + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + + case 7: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + + case 12: + message.syntax = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? ""; + message.dependency = object.dependency?.map(e => e) || []; + message.publicDependency = object.publicDependency?.map(e => e) || []; + message.weakDependency = object.weakDependency?.map(e => e) || []; + message.messageType = object.messageType?.map(e => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map(e => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map(e => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.options = object.options !== undefined && object.options !== null ? FileOptions.fromPartial(object.options) : undefined; + message.sourceCodeInfo = object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) : undefined; + message.syntax = object.syntax ?? 
""; + return message; + } + +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [] + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + + case 10: + message.reservedName.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map(e => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map(e => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map(e => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map(e => OneofDescriptorProto.fromPartial(e)) || []; + message.options = object.options !== undefined && object.options !== null ? MessageOptions.fromPartial(object.options) : undefined; + message.reservedRange = object.reservedRange?.map(e => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map(e => e) || []; + return message; + } + +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { + start: 0, + end: 0, + options: undefined + }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + + case 2: + message.end = reader.int32(); + break; + + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = object.options !== undefined && object.options !== null ? ExtensionRangeOptions.fromPartial(object.options) : undefined; + return message; + } + +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { + start: 0, + end: 0 + }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + + case 2: + message.end = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + } + +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { + uninterpretedOption: [] + }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 3: + message.number = reader.int32(); + break; + + case 4: + message.label = (reader.int32() as any); + break; + + case 5: + message.type = (reader.int32() as any); + break; + + case 6: + message.typeName = reader.string(); + break; + + case 2: + message.extendee = reader.string(); + break; + + case 7: + message.defaultValue = reader.string(); + break; + + case 9: + message.oneofIndex = reader.int32(); + break; + + case 10: + message.jsonName = reader.string(); + break; + + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = object.options !== undefined && object.options !== null ? FieldOptions.fromPartial(object.options) : undefined; + return message; + } + +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { + name: "", + options: undefined + }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = object.options !== undefined && object.options !== null ? 
OneofOptions.fromPartial(object.options) : undefined; + return message; + } + +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { + name: "", + value: [], + options: undefined, + reservedRange: [], + reservedName: [] + }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + + case 5: + message.reservedName.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map(e => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = object.options !== undefined && object.options !== null ? EnumOptions.fromPartial(object.options) : undefined; + message.reservedRange = object.reservedRange?.map(e => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map(e => e) || []; + return message; + } + +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { + start: 0, + end: 0 + }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + + case 2: + message.end = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + } + +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { + name: "", + number: 0, + options: undefined + }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.number = reader.int32(); + break; + + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = object.options !== undefined && object.options !== null ? EnumValueOptions.fromPartial(object.options) : undefined; + return message; + } + +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { + name: "", + method: [], + options: undefined + }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map(e => MethodDescriptorProto.fromPartial(e)) || []; + message.options = object.options !== undefined && object.options !== null ? ServiceOptions.fromPartial(object.options) : undefined; + return message; + } + +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + + case 2: + message.inputType = reader.string(); + break; + + case 3: + message.outputType = reader.string(); + break; + + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + + case 5: + message.clientStreaming = reader.bool(); + break; + + case 6: + message.serverStreaming = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = object.options !== undefined && object.options !== null ? MethodOptions.fromPartial(object.options) : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + } + +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [] + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + + if (message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + + case 8: + message.javaOuterClassname = reader.string(); + break; + + case 10: + message.javaMultipleFiles = reader.bool(); + break; + + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + + case 9: + message.optimizeFor = (reader.int32() as any); + break; + + case 11: + message.goPackage = reader.string(); + break; + + case 16: + message.ccGenericServices = reader.bool(); + break; + + case 17: + message.javaGenericServices = reader.bool(); + break; + + case 18: + message.pyGenericServices = reader.bool(); + break; + + case 42: + message.phpGenericServices = reader.bool(); + break; + + case 23: + message.deprecated = reader.bool(); + break; + + case 31: + message.ccEnableArenas = reader.bool(); + break; + + case 36: + message.objcClassPrefix = reader.string(); + break; + + case 37: + message.csharpNamespace = reader.string(); + break; + + case 39: + message.swiftPrefix = reader.string(); + break; + + case 40: + message.phpClassPrefix = reader.string(); + break; + + case 41: + message.phpNamespace = reader.string(); + break; + + case 44: + message.phpMetadataNamespace = reader.string(); + break; + + case 45: + message.rubyPackage = reader.string(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [] + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + + case 3: + message.deprecated = reader.bool(); + break; + + case 7: + message.mapEntry = reader.bool(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 1, + packed: false, + jstype: 1, + lazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [] + }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 1) { + writer.uint32(8).int32(message.ctype); + } + + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + + if (message.jstype !== 1) { + writer.uint32(48).int32(message.jstype); + } + + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ctype = (reader.int32() as any); + break; + + case 2: + message.packed = reader.bool(); + break; + + case 6: + message.jstype = (reader.int32() as any); + break; + + case 5: + message.lazy = reader.bool(); + break; + + case 3: + message.deprecated = reader.bool(); + break; + + case 10: + message.weak = reader.bool(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 1; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 1; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseOneofOptions(): OneofOptions { + return { + uninterpretedOption: [] + }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + uninterpretedOption: [] + }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + + case 3: + message.deprecated = reader.bool(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + uninterpretedOption: [] + }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseServiceOptions(): ServiceOptions { + return { + deprecated: false, + uninterpretedOption: [] + }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseMethodOptions(): MethodOptions { + return { + deprecated: false, + idempotencyLevel: 1, + uninterpretedOption: [] + }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + + if (message.idempotencyLevel !== 1) { + writer.uint32(272).int32(message.idempotencyLevel); + } + + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + + case 34: + message.idempotencyLevel = (reader.int32() as any); + break; + + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 1; + message.uninterpretedOption = object.uninterpretedOption?.map(e => UninterpretedOption.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: Long.UZERO, + negativeIntValue: Long.ZERO, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "" + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + + if (!message.positiveIntValue.isZero()) { + writer.uint32(32).uint64(message.positiveIntValue); + } + + if (!message.negativeIntValue.isZero()) { + writer.uint32(40).int64(message.negativeIntValue); + } + + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + + case 3: + message.identifierValue = reader.string(); + break; + + case 4: + message.positiveIntValue = (reader.uint64() as Long); + break; + + case 5: + message.negativeIntValue = (reader.int64() as Long); + break; + + case 6: + message.doubleValue = reader.double(); + break; + + case 7: + message.stringValue = reader.bytes(); + break; + + case 8: + message.aggregateValue = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map(e => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue !== undefined && object.positiveIntValue !== null ? Long.fromValue(object.positiveIntValue) : Long.UZERO; + message.negativeIntValue = object.negativeIntValue !== undefined && object.negativeIntValue !== null ? Long.fromValue(object.negativeIntValue) : Long.ZERO; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + } + +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { + namePart: "", + isExtension: false + }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + + case 2: + message.isExtension = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + } + +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { + location: [] + }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map(e => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { + path: [], + span: [], + leadingComments: "", + trailingComments: "", + leadingDetachedComments: [] + }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.path) { + writer.int32(v); + } + + writer.ldelim(); + writer.uint32(18).fork(); + + for (const v of message.span) { + writer.int32(v); + } + + writer.ldelim(); + + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + + break; + + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + + break; + + case 3: + message.leadingComments = reader.string(); + break; + + case 4: + message.trailingComments = reader.string(); + break; + + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map(e => e) || []; + message.span = object.span?.map(e => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map(e => e) || []; + return message; + } + +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { + annotation: [] + }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map(e => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { + path: [], + sourceFile: "", + begin: 0, + end: 0 + }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.path) { + writer.int32(v); + } + + writer.ldelim(); + + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + + break; + + case 2: + message.sourceFile = reader.string(); + break; + + case 3: + message.begin = reader.int32(); + break; + + case 4: + message.end = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map(e => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/google/protobuf/duration.ts b/packages/codegen/src/google/protobuf/duration.ts new file mode 100644 index 00000000..620a27d1 --- /dev/null +++ b/packages/codegen/src/google/protobuf/duration.ts @@ -0,0 +1,201 @@ +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. 
+ * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ + +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: Long; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + + nanos: number; +} +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (durations.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+ * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ + +export interface DurationSDKType { + seconds: Long; + nanos: number; +} + +function createBaseDuration(): Duration { + return { + seconds: Long.ZERO, + nanos: 0 + }; +} + +export const Duration = { + encode(message: Duration, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.seconds.isZero()) { + writer.uint32(8).int64(message.seconds); + } + + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Duration { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuration(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.seconds = (reader.int64() as Long); + break; + + case 2: + message.nanos = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Duration { + const message = createBaseDuration(); + message.seconds = object.seconds !== undefined && object.seconds !== null ? Long.fromValue(object.seconds) : Long.ZERO; + message.nanos = object.nanos ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/google/protobuf/empty.ts b/packages/codegen/src/google/protobuf/empty.ts new file mode 100644 index 00000000..16b43f97 --- /dev/null +++ b/packages/codegen/src/google/protobuf/empty.ts @@ -0,0 +1,62 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * + * The JSON representation for `Empty` is empty JSON object `{}`. + */ + +export interface Empty {} +/** + * A generic empty message that you can re-use to avoid defining duplicated + * empty messages in your APIs. A typical example is to use it as the request + * or the response type of an API method. For instance: + * + * service Foo { + * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + * } + * + * The JSON representation for `Empty` is empty JSON object `{}`. 
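Editorial note, not part of the generated output: the Duration module generated just above exposes the usual telescope trio of encode, decode, and fromPartial. A minimal usage sketch follows; the import path and literal values are illustrative and assume code living alongside packages/codegen/src.

import { Duration } from "./google/protobuf/duration";

// Build a Duration from a plain object; fromPartial fills proto3 defaults for
// omitted fields and accepts a plain number for the Long-typed `seconds` field.
const threeSeconds = Duration.fromPartial({ seconds: 3, nanos: 1 });

// Serialize to the protobuf wire format and decode it back.
const bytes = Duration.encode(threeSeconds).finish();
const decoded = Duration.decode(bytes);
// decoded.seconds.toNumber() === 3, decoded.nanos === 1

fromPartial tolerates numbers and strings for 64-bit fields because the DeepPartial helper type widens Long to string | number | Long, which keeps call sites free of explicit Long construction.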
+ */ + +export interface EmptySDKType {} + +function createBaseEmpty(): Empty { + return {}; +} + +export const Empty = { + encode(_: Empty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Empty { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEmpty(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): Empty { + const message = createBaseEmpty(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/google/protobuf/timestamp.ts b/packages/codegen/src/google/protobuf/timestamp.ts new file mode 100644 index 00000000..3bb95f53 --- /dev/null +++ b/packages/codegen/src/google/protobuf/timestamp.ts @@ -0,0 +1,247 @@ +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ + +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: Long; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + + nanos: number; +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
+ * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ + +export interface TimestampSDKType { + seconds: Long; + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { + seconds: Long.ZERO, + nanos: 0 + }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.seconds.isZero()) { + writer.uint32(8).int64(message.seconds); + } + + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.seconds = (reader.int64() as Long); + break; + + case 2: + message.nanos = reader.int32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds !== undefined && object.seconds !== null ? Long.fromValue(object.seconds) : Long.ZERO; + message.nanos = object.nanos ?? 
0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/helpers.ts b/packages/codegen/src/helpers.ts new file mode 100644 index 00000000..67359aa7 --- /dev/null +++ b/packages/codegen/src/helpers.ts @@ -0,0 +1,242 @@ +/** +* This file and any referenced files were automatically generated by @osmonauts/telescope@0.88.2 +* DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain +* and run the transpile command or yarn proto command to regenerate this bundle. +*/ + +import * as _m0 from "protobufjs/minimal"; +import Long from 'long'; + +// @ts-ignore +if (_m0.util.Long !== Long) { + _m0.util.Long = (Long as any); + + _m0.configure(); +} + +export { Long }; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== 'undefined') return globalThis; + if (typeof self !== 'undefined') return self; + if (typeof window !== 'undefined') return window; + if (typeof global !== 'undefined') return global; + throw 'Unable to locate global object'; +})(); + +const atob: (b64: string) => string = + globalThis.atob || ((b64) => globalThis.Buffer.from(b64, 'base64').toString('binary')); + +export function bytesFromBase64(b64: string): Uint8Array { + const bin = atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; +} + +const btoa: (bin: string) => string = + globalThis.btoa || ((bin) => globalThis.Buffer.from(bin, 'binary').toString('base64')); + +export function base64FromBytes(arr: Uint8Array): string { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return btoa(bin.join('')); +} + +export interface AminoHeight { + readonly revision_number?: string; + readonly revision_height?: string; +}; + +export function omitDefault(input: T): T | undefined { + if (typeof input === "string") { + return input === "" ? undefined : input; + } + + if (typeof input === "number") { + return input === 0 ? undefined : input; + } + + if (Long.isLong(input)) { + return input.isZero() ? undefined : input; + } + + throw new Error(`Got unsupported type ${typeof input}`); +}; + +interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: Long; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. 
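Editorial note, not part of the generated output: the utilities defined earlier in helpers.ts (bytesFromBase64, base64FromBytes, omitDefault) back the JSON/amino handling of bytes and default values, and the file also patches _m0.util.Long and calls _m0.configure() so protobufjs returns Long instances for 64-bit fields. A rough behavioural sketch, with illustrative values and an import path assumed to sit next to packages/codegen/src/helpers.ts:

import { Long, bytesFromBase64, base64FromBytes, omitDefault } from "./helpers";

// The base64 helpers round-trip raw bytes, as used for proto `bytes` fields in JSON.
const raw = bytesFromBase64("aGVsbG8=");   // Uint8Array for "hello"
const b64 = base64FromBytes(raw);          // "aGVsbG8=" again

// omitDefault maps proto3 default values to undefined so they can be dropped
// from amino/JSON output; non-default values pass through unchanged.
omitDefault("");          // undefined
omitDefault("transfer");  // "transfer"
omitDefault(Long.UZERO);  // undefined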
+ */ + + nanos: number; +} + +export function toDuration(duration: string): Duration { + return { + seconds: Long.fromNumber(Math.floor(parseInt(duration) / 1000000000)), + nanos: parseInt(duration) % 1000000000 + }; +}; + +export function fromDuration(duration: Duration): string { + return (parseInt(duration.seconds.toString()) * 1000000000 + duration.nanos).toString(); +}; + +export function isSet(value: any): boolean { + return value !== null && value !== undefined; +}; + +export function isObject(value: any): boolean { + return typeof value === 'object' && value !== null; +}; + +export interface PageRequest { + key: Uint8Array; + offset: Long; + limit: Long; + countTotal: boolean; + reverse: boolean; +}; + +export interface PageRequestParams { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; +}; + +export interface Params { + params: PageRequestParams; +}; + +export const setPaginationParams = (options: Params, pagination?: PageRequest) => { + + if (!pagination) { + return options; + } + + if (typeof pagination?.countTotal !== "undefined") { + options.params['pagination.count_total'] = pagination.countTotal; + } + if (typeof pagination?.key !== "undefined") { + // String to Uint8Array + // let uint8arr = new Uint8Array(Buffer.from(data,'base64')); + + // Uint8Array to String + options.params['pagination.key'] = Buffer.from(pagination.key).toString('base64'); + } + if (typeof pagination?.limit !== "undefined") { + options.params["pagination.limit"] = pagination.limit.toString() + } + if (typeof pagination?.offset !== "undefined") { + options.params["pagination.offset"] = pagination.offset.toString() + } + if (typeof pagination?.reverse !== "undefined") { + options.params['pagination.reverse'] = pagination.reverse; + } + + return options; +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Long + ? string | number | Long + : T extends Array + ? Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & Record>, never>; + +export interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +}; + +interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: Long; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + + nanos: number; +} + +export function toTimestamp(date: Date): Timestamp { + const seconds = numberToLong(date.getTime() / 1_000); + const nanos = date.getTime() % 1000 * 1000000; + return { + seconds, + nanos + }; +}; + +export function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds.toNumber() * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +}; + +const fromJSON = (object: any): Timestamp => { + return { + seconds: isSet(object.seconds) ? Long.fromString(object.seconds) : Long.ZERO, + nanos: isSet(object.nanos) ? 
Number(object.nanos) : 0 + }; +}; + +const timestampFromJSON = (object: any): Timestamp => { + return { + seconds: isSet(object.seconds) ? Long.fromValue(object.seconds) : Long.ZERO, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; +} + +export function fromJsonTimestamp(o: any): Timestamp { + if (o instanceof Date) { + return toTimestamp(o); + } else if (typeof o === "string") { + return toTimestamp(new Date(o)); + } else { + return timestampFromJSON(o); + } +} + +function numberToLong(number: number) { + return Long.fromNumber(number); +} diff --git a/packages/codegen/src/ibc/applications/transfer/v1/genesis.ts b/packages/codegen/src/ibc/applications/transfer/v1/genesis.ts new file mode 100644 index 00000000..a44fcba3 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/genesis.ts @@ -0,0 +1,82 @@ +import { DenomTrace, DenomTraceSDKType, Params, ParamsSDKType } from "./transfer"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** GenesisState defines the ibc-transfer genesis state */ + +export interface GenesisState { + portId: string; + denomTraces: DenomTrace[]; + params?: Params; +} +/** GenesisState defines the ibc-transfer genesis state */ + +export interface GenesisStateSDKType { + port_id: string; + denom_traces: DenomTraceSDKType[]; + params?: ParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + portId: "", + denomTraces: [], + params: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + for (const v of message.denomTraces) { + DenomTrace.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.denomTraces.push(DenomTrace.decode(reader, reader.uint32())); + break; + + case 3: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.portId = object.portId ?? ""; + message.denomTraces = object.denomTraces?.map(e => DenomTrace.fromPartial(e)) || []; + message.params = object.params !== undefined && object.params !== null ? 
Params.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/query.lcd.ts b/packages/codegen/src/ibc/applications/transfer/v1/query.lcd.ts new file mode 100644 index 00000000..7bce53a4 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/query.lcd.ts @@ -0,0 +1,49 @@ +import { setPaginationParams } from "../../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryDenomTraceRequest, QueryDenomTraceResponseSDKType, QueryDenomTracesRequest, QueryDenomTracesResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.denomTrace = this.denomTrace.bind(this); + this.denomTraces = this.denomTraces.bind(this); + this.params = this.params.bind(this); + } + /* DenomTrace queries a denomination trace information. */ + + + async denomTrace(params: QueryDenomTraceRequest): Promise<QueryDenomTraceResponseSDKType> { + const endpoint = `ibc/apps/transfer/v1/denom_traces/${params.hash}`; + return await this.req.get<QueryDenomTraceResponseSDKType>(endpoint); + } + /* DenomTraces queries all denomination traces. */ + + + async denomTraces(params: QueryDenomTracesRequest = { + pagination: undefined + }): Promise<QueryDenomTracesResponseSDKType> { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/apps/transfer/v1/denom_traces`; + return await this.req.get<QueryDenomTracesResponseSDKType>(endpoint, options); + } + /* Params queries all parameters of the ibc-transfer module. */ + + + async params(_params: QueryParamsRequest = {}): Promise<QueryParamsResponseSDKType> { + const endpoint = `ibc/apps/transfer/v1/params`; + return await this.req.get<QueryParamsResponseSDKType>(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/query.rpc.Query.ts b/packages/codegen/src/ibc/applications/transfer/v1/query.rpc.Query.ts new file mode 100644 index 00000000..62b2cc99 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/query.rpc.Query.ts @@ -0,0 +1,65 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryDenomTraceRequest, QueryDenomTraceResponse, QueryDenomTracesRequest, QueryDenomTracesResponse, QueryParamsRequest, QueryParamsResponse } from "./query"; +/** Query provides defines the gRPC querier service. */ + +export interface Query { + /** DenomTrace queries a denomination trace information. */ + denomTrace(request: QueryDenomTraceRequest): Promise<QueryDenomTraceResponse>; + /** DenomTraces queries all denomination traces. */ + + denomTraces(request?: QueryDenomTracesRequest): Promise<QueryDenomTracesResponse>; + /** Params queries all parameters of the ibc-transfer module. 
 */ + + params(request?: QueryParamsRequest): Promise<QueryParamsResponse>; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.denomTrace = this.denomTrace.bind(this); + this.denomTraces = this.denomTraces.bind(this); + this.params = this.params.bind(this); + } + + denomTrace(request: QueryDenomTraceRequest): Promise<QueryDenomTraceResponse> { + const data = QueryDenomTraceRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.applications.transfer.v1.Query", "DenomTrace", data); + return promise.then(data => QueryDenomTraceResponse.decode(new _m0.Reader(data))); + } + + denomTraces(request: QueryDenomTracesRequest = { + pagination: undefined + }): Promise<QueryDenomTracesResponse> { + const data = QueryDenomTracesRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.applications.transfer.v1.Query", "DenomTraces", data); + return promise.then(data => QueryDenomTracesResponse.decode(new _m0.Reader(data))); + } + + params(request: QueryParamsRequest = {}): Promise<QueryParamsResponse> { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.applications.transfer.v1.Query", "Params", data); + return promise.then(data => QueryParamsResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + denomTrace(request: QueryDenomTraceRequest): Promise<QueryDenomTraceResponse> { + return queryService.denomTrace(request); + }, + + denomTraces(request?: QueryDenomTracesRequest): Promise<QueryDenomTracesResponse> { + return queryService.denomTraces(request); + }, + + params(request?: QueryParamsRequest): Promise<QueryParamsResponse> { + return queryService.params(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/query.ts b/packages/codegen/src/ibc/applications/transfer/v1/query.ts new file mode 100644 index 00000000..bee73fc4 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/query.ts @@ -0,0 +1,362 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { DenomTrace, DenomTraceSDKType, Params, ParamsSDKType } from "./transfer"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * QueryDenomTraceRequest is the request type for the Query/DenomTrace RPC + * method + */ + +export interface QueryDenomTraceRequest { + /** hash (in hex format) of the denomination trace information. */ + hash: string; +} +/** + * QueryDenomTraceRequest is the request type for the Query/DenomTrace RPC + * method + */ + +export interface QueryDenomTraceRequestSDKType { + hash: string; +} +/** + * QueryDenomTraceResponse is the response type for the Query/DenomTrace RPC + * method. + */ + +export interface QueryDenomTraceResponse { + /** denom_trace returns the requested denomination trace information. */ + denomTrace?: DenomTrace; +} +/** + * QueryDenomTraceResponse is the response type for the Query/DenomTrace RPC + * method. + */ + +export interface QueryDenomTraceResponseSDKType { + denom_trace?: DenomTraceSDKType; +} +/** + * QueryConnectionsRequest is the request type for the Query/DenomTraces RPC + * method + */ + +export interface QueryDenomTracesRequest { + /** pagination defines an optional pagination for the request. 
*/ + pagination?: PageRequest; +} +/** + * QueryConnectionsRequest is the request type for the Query/DenomTraces RPC + * method + */ + +export interface QueryDenomTracesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionsResponse is the response type for the Query/DenomTraces RPC + * method. + */ + +export interface QueryDenomTracesResponse { + /** denom_traces returns all denominations trace information. */ + denomTraces: DenomTrace[]; + /** pagination defines the pagination in the response. */ + + pagination?: PageResponse; +} +/** + * QueryConnectionsResponse is the response type for the Query/DenomTraces RPC + * method. + */ + +export interface QueryDenomTracesResponseSDKType { + denom_traces: DenomTraceSDKType[]; + pagination?: PageResponseSDKType; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequest {} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ + +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ + +export interface QueryParamsResponseSDKType { + params?: ParamsSDKType; +} + +function createBaseQueryDenomTraceRequest(): QueryDenomTraceRequest { + return { + hash: "" + }; +} + +export const QueryDenomTraceRequest = { + encode(message: QueryDenomTraceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash !== "") { + writer.uint32(10).string(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTraceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomTraceRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomTraceRequest { + const message = createBaseQueryDenomTraceRequest(); + message.hash = object.hash ?? ""; + return message; + } + +}; + +function createBaseQueryDenomTraceResponse(): QueryDenomTraceResponse { + return { + denomTrace: undefined + }; +} + +export const QueryDenomTraceResponse = { + encode(message: QueryDenomTraceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denomTrace !== undefined) { + DenomTrace.encode(message.denomTrace, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTraceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryDenomTraceResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denomTrace = DenomTrace.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomTraceResponse { + const message = createBaseQueryDenomTraceResponse(); + message.denomTrace = object.denomTrace !== undefined && object.denomTrace !== null ? DenomTrace.fromPartial(object.denomTrace) : undefined; + return message; + } + +}; + +function createBaseQueryDenomTracesRequest(): QueryDenomTracesRequest { + return { + pagination: undefined + }; +} + +export const QueryDenomTracesRequest = { + encode(message: QueryDenomTracesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTracesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomTracesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomTracesRequest { + const message = createBaseQueryDenomTracesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryDenomTracesResponse(): QueryDenomTracesResponse { + return { + denomTraces: [], + pagination: undefined + }; +} + +export const QueryDenomTracesResponse = { + encode(message: QueryDenomTracesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.denomTraces) { + DenomTrace.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryDenomTracesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDenomTracesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denomTraces.push(DenomTrace.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryDenomTracesResponse { + const message = createBaseQueryDenomTracesResponse(); + message.denomTraces = object.denomTraces?.map(e => DenomTrace.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + } + +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: undefined + }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/transfer.ts b/packages/codegen/src/ibc/applications/transfer/v1/transfer.ts new file mode 100644 index 00000000..40710588 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/transfer.ts @@ -0,0 +1,167 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * DenomTrace contains the base denomination for ICS20 fungible tokens and the + * source tracing information path. + */ + +export interface DenomTrace { + /** + * path defines the chain of port/channel identifiers used for tracing the + * source of the fungible token. + */ + path: string; + /** base denomination of the relayed fungible token. */ + + baseDenom: string; +} +/** + * DenomTrace contains the base denomination for ICS20 fungible tokens and the + * source tracing information path. + */ + +export interface DenomTraceSDKType { + path: string; + base_denom: string; +} +/** + * Params defines the set of IBC transfer parameters. + * NOTE: To prevent a single token from being transferred, set the + * TransfersEnabled parameter to true and then set the bank module's SendEnabled + * parameter for the denomination to false. + */ + +export interface Params { + /** + * send_enabled enables or disables all cross-chain token transfers from this + * chain. 
+ */ + sendEnabled: boolean; + /** + * receive_enabled enables or disables all cross-chain token transfers to this + * chain. + */ + + receiveEnabled: boolean; +} +/** + * Params defines the set of IBC transfer parameters. + * NOTE: To prevent a single token from being transferred, set the + * TransfersEnabled parameter to true and then set the bank module's SendEnabled + * parameter for the denomination to false. + */ + +export interface ParamsSDKType { + send_enabled: boolean; + receive_enabled: boolean; +} + +function createBaseDenomTrace(): DenomTrace { + return { + path: "", + baseDenom: "" + }; +} + +export const DenomTrace = { + encode(message: DenomTrace, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path !== "") { + writer.uint32(10).string(message.path); + } + + if (message.baseDenom !== "") { + writer.uint32(18).string(message.baseDenom); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DenomTrace { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDenomTrace(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + + case 2: + message.baseDenom = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DenomTrace { + const message = createBaseDenomTrace(); + message.path = object.path ?? ""; + message.baseDenom = object.baseDenom ?? ""; + return message; + } + +}; + +function createBaseParams(): Params { + return { + sendEnabled: false, + receiveEnabled: false + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sendEnabled === true) { + writer.uint32(8).bool(message.sendEnabled); + } + + if (message.receiveEnabled === true) { + writer.uint32(16).bool(message.receiveEnabled); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sendEnabled = reader.bool(); + break; + + case 2: + message.receiveEnabled = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.sendEnabled = object.sendEnabled ?? false; + message.receiveEnabled = object.receiveEnabled ?? false; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/tx.rpc.msg.ts b/packages/codegen/src/ibc/applications/transfer/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..b0ed7b31 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/tx.rpc.msg.ts @@ -0,0 +1,24 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgTransfer, MsgTransferResponse } from "./tx"; +/** Msg defines the ibc/transfer Msg service. */ + +export interface Msg { + /** Transfer defines a rpc handler method for MsgTransfer. 
*/ + transfer(request: MsgTransfer): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.transfer = this.transfer.bind(this); + } + + transfer(request: MsgTransfer): Promise { + const data = MsgTransfer.encode(request).finish(); + const promise = this.rpc.request("ibc.applications.transfer.v1.Msg", "Transfer", data); + return promise.then(data => MsgTransferResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v1/tx.ts b/packages/codegen/src/ibc/applications/transfer/v1/tx.ts new file mode 100644 index 00000000..54c7ab4f --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v1/tx.ts @@ -0,0 +1,198 @@ +import { Coin, CoinSDKType } from "../../../../cosmos/base/v1beta1/coin"; +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgTransfer defines a msg to transfer fungible tokens (i.e Coins) between + * ICS20 enabled chains. See ICS Spec here: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ + +export interface MsgTransfer { + /** the port on which the packet will be sent */ + sourcePort: string; + /** the channel by which the packet will be sent */ + + sourceChannel: string; + /** the tokens to be transferred */ + + token?: Coin; + /** the sender address */ + + sender: string; + /** the recipient address on the destination chain */ + + receiver: string; + /** + * Timeout height relative to the current block height. + * The timeout is disabled when set to 0. + */ + + timeoutHeight?: Height; + /** + * Timeout timestamp (in nanoseconds) relative to the current block timestamp. + * The timeout is disabled when set to 0. + */ + + timeoutTimestamp: Long; +} +/** + * MsgTransfer defines a msg to transfer fungible tokens (i.e Coins) between + * ICS20 enabled chains. See ICS Spec here: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ + +export interface MsgTransferSDKType { + source_port: string; + source_channel: string; + token?: CoinSDKType; + sender: string; + receiver: string; + timeout_height?: HeightSDKType; + timeout_timestamp: Long; +} +/** MsgTransferResponse defines the Msg/Transfer response type. */ + +export interface MsgTransferResponse {} +/** MsgTransferResponse defines the Msg/Transfer response type. 
*/ + +export interface MsgTransferResponseSDKType {} + +function createBaseMsgTransfer(): MsgTransfer { + return { + sourcePort: "", + sourceChannel: "", + token: undefined, + sender: "", + receiver: "", + timeoutHeight: undefined, + timeoutTimestamp: Long.UZERO + }; +} + +export const MsgTransfer = { + encode(message: MsgTransfer, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.sourcePort !== "") { + writer.uint32(10).string(message.sourcePort); + } + + if (message.sourceChannel !== "") { + writer.uint32(18).string(message.sourceChannel); + } + + if (message.token !== undefined) { + Coin.encode(message.token, writer.uint32(26).fork()).ldelim(); + } + + if (message.sender !== "") { + writer.uint32(34).string(message.sender); + } + + if (message.receiver !== "") { + writer.uint32(42).string(message.receiver); + } + + if (message.timeoutHeight !== undefined) { + Height.encode(message.timeoutHeight, writer.uint32(50).fork()).ldelim(); + } + + if (!message.timeoutTimestamp.isZero()) { + writer.uint32(56).uint64(message.timeoutTimestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTransfer { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgTransfer(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sourcePort = reader.string(); + break; + + case 2: + message.sourceChannel = reader.string(); + break; + + case 3: + message.token = Coin.decode(reader, reader.uint32()); + break; + + case 4: + message.sender = reader.string(); + break; + + case 5: + message.receiver = reader.string(); + break; + + case 6: + message.timeoutHeight = Height.decode(reader, reader.uint32()); + break; + + case 7: + message.timeoutTimestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgTransfer { + const message = createBaseMsgTransfer(); + message.sourcePort = object.sourcePort ?? ""; + message.sourceChannel = object.sourceChannel ?? ""; + message.token = object.token !== undefined && object.token !== null ? Coin.fromPartial(object.token) : undefined; + message.sender = object.sender ?? ""; + message.receiver = object.receiver ?? ""; + message.timeoutHeight = object.timeoutHeight !== undefined && object.timeoutHeight !== null ? Height.fromPartial(object.timeoutHeight) : undefined; + message.timeoutTimestamp = object.timeoutTimestamp !== undefined && object.timeoutTimestamp !== null ? Long.fromValue(object.timeoutTimestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseMsgTransferResponse(): MsgTransferResponse { + return {}; +} + +export const MsgTransferResponse = { + encode(_: MsgTransferResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTransferResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgTransferResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgTransferResponse { + const message = createBaseMsgTransferResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/applications/transfer/v2/packet.ts b/packages/codegen/src/ibc/applications/transfer/v2/packet.ts new file mode 100644 index 00000000..44f2aaa7 --- /dev/null +++ b/packages/codegen/src/ibc/applications/transfer/v2/packet.ts @@ -0,0 +1,108 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * FungibleTokenPacketData defines a struct for the packet payload + * See FungibleTokenPacketData spec: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ + +export interface FungibleTokenPacketData { + /** the token denomination to be transferred */ + denom: string; + /** the token amount to be transferred */ + + amount: string; + /** the sender address */ + + sender: string; + /** the recipient address on the destination chain */ + + receiver: string; +} +/** + * FungibleTokenPacketData defines a struct for the packet payload + * See FungibleTokenPacketData spec: + * https://github.com/cosmos/ibc/tree/master/spec/app/ics-020-fungible-token-transfer#data-structures + */ + +export interface FungibleTokenPacketDataSDKType { + denom: string; + amount: string; + sender: string; + receiver: string; +} + +function createBaseFungibleTokenPacketData(): FungibleTokenPacketData { + return { + denom: "", + amount: "", + sender: "", + receiver: "" + }; +} + +export const FungibleTokenPacketData = { + encode(message: FungibleTokenPacketData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + + if (message.sender !== "") { + writer.uint32(26).string(message.sender); + } + + if (message.receiver !== "") { + writer.uint32(34).string(message.receiver); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FungibleTokenPacketData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFungibleTokenPacketData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + + case 2: + message.amount = reader.string(); + break; + + case 3: + message.sender = reader.string(); + break; + + case 4: + message.receiver = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): FungibleTokenPacketData { + const message = createBaseFungibleTokenPacketData(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + message.sender = object.sender ?? ""; + message.receiver = object.receiver ?? 
""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/bundle.ts b/packages/codegen/src/ibc/bundle.ts new file mode 100644 index 00000000..63720a3b --- /dev/null +++ b/packages/codegen/src/ibc/bundle.ts @@ -0,0 +1,121 @@ +import * as _110 from "./applications/transfer/v1/genesis"; +import * as _111 from "./applications/transfer/v1/query"; +import * as _112 from "./applications/transfer/v1/transfer"; +import * as _113 from "./applications/transfer/v1/tx"; +import * as _114 from "./applications/transfer/v2/packet"; +import * as _115 from "./core/channel/v1/channel"; +import * as _116 from "./core/channel/v1/genesis"; +import * as _117 from "./core/channel/v1/query"; +import * as _118 from "./core/channel/v1/tx"; +import * as _119 from "./core/client/v1/client"; +import * as _120 from "./core/client/v1/genesis"; +import * as _121 from "./core/client/v1/query"; +import * as _122 from "./core/client/v1/tx"; +import * as _123 from "./core/commitment/v1/commitment"; +import * as _124 from "./core/connection/v1/connection"; +import * as _125 from "./core/connection/v1/genesis"; +import * as _126 from "./core/connection/v1/query"; +import * as _127 from "./core/connection/v1/tx"; +import * as _128 from "./core/port/v1/query"; +import * as _129 from "./core/types/v1/genesis"; +import * as _130 from "./lightclients/localhost/v1/localhost"; +import * as _131 from "./lightclients/solomachine/v1/solomachine"; +import * as _132 from "./lightclients/solomachine/v2/solomachine"; +import * as _133 from "./lightclients/tendermint/v1/tendermint"; +import * as _197 from "./applications/transfer/v1/query.lcd"; +import * as _198 from "./core/channel/v1/query.lcd"; +import * as _199 from "./core/client/v1/query.lcd"; +import * as _200 from "./core/connection/v1/query.lcd"; +import * as _201 from "./applications/transfer/v1/query.rpc.Query"; +import * as _202 from "./core/channel/v1/query.rpc.Query"; +import * as _203 from "./core/client/v1/query.rpc.Query"; +import * as _204 from "./core/connection/v1/query.rpc.Query"; +import * as _205 from "./core/port/v1/query.rpc.Query"; +import * as _206 from "./applications/transfer/v1/tx.rpc.msg"; +import * as _207 from "./core/channel/v1/tx.rpc.msg"; +import * as _208 from "./core/client/v1/tx.rpc.msg"; +import * as _209 from "./core/connection/v1/tx.rpc.msg"; +import * as _216 from "./lcd"; +import * as _217 from "./rpc.query"; +import * as _218 from "./rpc.tx"; +export namespace ibc { + export namespace applications { + export namespace transfer { + export const v1 = { ..._110, + ..._111, + ..._112, + ..._113, + ..._197, + ..._201, + ..._206 + }; + export const v2 = { ..._114 + }; + } + } + export namespace core { + export namespace channel { + export const v1 = { ..._115, + ..._116, + ..._117, + ..._118, + ..._198, + ..._202, + ..._207 + }; + } + export namespace client { + export const v1 = { ..._119, + ..._120, + ..._121, + ..._122, + ..._199, + ..._203, + ..._208 + }; + } + export namespace commitment { + export const v1 = { ..._123 + }; + } + export namespace connection { + export const v1 = { ..._124, + ..._125, + ..._126, + ..._127, + ..._200, + ..._204, + ..._209 + }; + } + export namespace port { + export const v1 = { ..._128, + ..._205 + }; + } + export namespace types { + export const v1 = { ..._129 + }; + } + } + export namespace lightclients { + export namespace localhost { + export const v1 = { ..._130 + }; + } + export namespace solomachine { + export const v1 = { ..._131 + }; + export const v2 = { ..._132 + }; + 
} + export namespace tendermint { + export const v1 = { ..._133 + }; + } + } + export const ClientFactory = { ..._216, + ..._217, + ..._218 + }; +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/channel.ts b/packages/codegen/src/ibc/core/channel/v1/channel.ts new file mode 100644 index 00000000..b84ffb9a --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/channel.ts @@ -0,0 +1,826 @@ +import { Height, HeightSDKType } from "../../client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * State defines if a channel is in one of the following states: + * CLOSED, INIT, TRYOPEN, OPEN or UNINITIALIZED. + */ + +export enum State { + /** STATE_UNINITIALIZED_UNSPECIFIED - Default State */ + STATE_UNINITIALIZED_UNSPECIFIED = 0, + + /** STATE_INIT - A channel has just started the opening handshake. */ + STATE_INIT = 1, + + /** STATE_TRYOPEN - A channel has acknowledged the handshake step on the counterparty chain. */ + STATE_TRYOPEN = 2, + + /** + * STATE_OPEN - A channel has completed the handshake. Open channels are + * ready to send and receive packets. + */ + STATE_OPEN = 3, + + /** + * STATE_CLOSED - A channel has been closed and can no longer be used to send or receive + * packets. + */ + STATE_CLOSED = 4, + UNRECOGNIZED = -1, +} +export const StateSDKType = State; +export function stateFromJSON(object: any): State { + switch (object) { + case 0: + case "STATE_UNINITIALIZED_UNSPECIFIED": + return State.STATE_UNINITIALIZED_UNSPECIFIED; + + case 1: + case "STATE_INIT": + return State.STATE_INIT; + + case 2: + case "STATE_TRYOPEN": + return State.STATE_TRYOPEN; + + case 3: + case "STATE_OPEN": + return State.STATE_OPEN; + + case 4: + case "STATE_CLOSED": + return State.STATE_CLOSED; + + case -1: + case "UNRECOGNIZED": + default: + return State.UNRECOGNIZED; + } +} +export function stateToJSON(object: State): string { + switch (object) { + case State.STATE_UNINITIALIZED_UNSPECIFIED: + return "STATE_UNINITIALIZED_UNSPECIFIED"; + + case State.STATE_INIT: + return "STATE_INIT"; + + case State.STATE_TRYOPEN: + return "STATE_TRYOPEN"; + + case State.STATE_OPEN: + return "STATE_OPEN"; + + case State.STATE_CLOSED: + return "STATE_CLOSED"; + + case State.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** Order defines if a channel is ORDERED or UNORDERED */ + +export enum Order { + /** ORDER_NONE_UNSPECIFIED - zero-value for channel ordering */ + ORDER_NONE_UNSPECIFIED = 0, + + /** + * ORDER_UNORDERED - packets can be delivered in any order, which may differ from the order in + * which they were sent. 
+ */ + ORDER_UNORDERED = 1, + + /** ORDER_ORDERED - packets are delivered exactly in the order which they were sent */ + ORDER_ORDERED = 2, + UNRECOGNIZED = -1, +} +export const OrderSDKType = Order; +export function orderFromJSON(object: any): Order { + switch (object) { + case 0: + case "ORDER_NONE_UNSPECIFIED": + return Order.ORDER_NONE_UNSPECIFIED; + + case 1: + case "ORDER_UNORDERED": + return Order.ORDER_UNORDERED; + + case 2: + case "ORDER_ORDERED": + return Order.ORDER_ORDERED; + + case -1: + case "UNRECOGNIZED": + default: + return Order.UNRECOGNIZED; + } +} +export function orderToJSON(object: Order): string { + switch (object) { + case Order.ORDER_NONE_UNSPECIFIED: + return "ORDER_NONE_UNSPECIFIED"; + + case Order.ORDER_UNORDERED: + return "ORDER_UNORDERED"; + + case Order.ORDER_ORDERED: + return "ORDER_ORDERED"; + + case Order.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. + */ + +export interface Channel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + + ordering: Order; + /** counterparty channel end */ + + counterparty?: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + + version: string; +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. + */ + +export interface ChannelSDKType { + state: State; + ordering: Order; + counterparty?: CounterpartySDKType; + connection_hops: string[]; + version: string; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ + +export interface IdentifiedChannel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + + ordering: Order; + /** counterparty channel end */ + + counterparty?: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + + version: string; + /** port identifier */ + + portId: string; + /** channel identifier */ + + channelId: string; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ + +export interface IdentifiedChannelSDKType { + state: State; + ordering: Order; + counterparty?: CounterpartySDKType; + connection_hops: string[]; + version: string; + port_id: string; + channel_id: string; +} +/** Counterparty defines a channel end counterparty */ + +export interface Counterparty { + /** port on the counterparty chain which owns the other end of the channel. 
*/ + portId: string; + /** channel end on the counterparty chain */ + + channelId: string; +} +/** Counterparty defines a channel end counterparty */ + +export interface CounterpartySDKType { + port_id: string; + channel_id: string; +} +/** Packet defines a type that carries data across different chains through IBC */ + +export interface Packet { + /** + * number corresponds to the order of sends and receives, where a Packet + * with an earlier sequence number must be sent and received before a Packet + * with a later sequence number. + */ + sequence: Long; + /** identifies the port on the sending chain. */ + + sourcePort: string; + /** identifies the channel end on the sending chain. */ + + sourceChannel: string; + /** identifies the port on the receiving chain. */ + + destinationPort: string; + /** identifies the channel end on the receiving chain. */ + + destinationChannel: string; + /** actual opaque bytes transferred directly to the application module */ + + data: Uint8Array; + /** block height after which the packet times out */ + + timeoutHeight?: Height; + /** block timestamp (in nanoseconds) after which the packet times out */ + + timeoutTimestamp: Long; +} +/** Packet defines a type that carries data across different chains through IBC */ + +export interface PacketSDKType { + sequence: Long; + source_port: string; + source_channel: string; + destination_port: string; + destination_channel: string; + data: Uint8Array; + timeout_height?: HeightSDKType; + timeout_timestamp: Long; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ + +export interface PacketState { + /** channel port identifier. */ + portId: string; + /** channel unique identifier. */ + + channelId: string; + /** packet sequence. */ + + sequence: Long; + /** embedded data that represents packet state. */ + + data: Uint8Array; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ + +export interface PacketStateSDKType { + port_id: string; + channel_id: string; + sequence: Long; + data: Uint8Array; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ + +export interface Acknowledgement { + result?: Uint8Array; + error?: string; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). 
Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ + +export interface AcknowledgementSDKType { + result?: Uint8Array; + error?: string; +} + +function createBaseChannel(): Channel { + return { + state: 0, + ordering: 0, + counterparty: undefined, + connectionHops: [], + version: "" + }; +} + +export const Channel = { + encode(message: Channel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.state !== 0) { + writer.uint32(8).int32(message.state); + } + + if (message.ordering !== 0) { + writer.uint32(16).int32(message.ordering); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.connectionHops) { + writer.uint32(34).string(v!); + } + + if (message.version !== "") { + writer.uint32(42).string(message.version); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Channel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChannel(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.state = (reader.int32() as any); + break; + + case 2: + message.ordering = (reader.int32() as any); + break; + + case 3: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 4: + message.connectionHops.push(reader.string()); + break; + + case 5: + message.version = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Channel { + const message = createBaseChannel(); + message.state = object.state ?? 0; + message.ordering = object.ordering ?? 0; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.connectionHops = object.connectionHops?.map(e => e) || []; + message.version = object.version ?? ""; + return message; + } + +}; + +function createBaseIdentifiedChannel(): IdentifiedChannel { + return { + state: 0, + ordering: 0, + counterparty: undefined, + connectionHops: [], + version: "", + portId: "", + channelId: "" + }; +} + +export const IdentifiedChannel = { + encode(message: IdentifiedChannel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.state !== 0) { + writer.uint32(8).int32(message.state); + } + + if (message.ordering !== 0) { + writer.uint32(16).int32(message.ordering); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.connectionHops) { + writer.uint32(34).string(v!); + } + + if (message.version !== "") { + writer.uint32(42).string(message.version); + } + + if (message.portId !== "") { + writer.uint32(50).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(58).string(message.channelId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedChannel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseIdentifiedChannel(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.state = (reader.int32() as any); + break; + + case 2: + message.ordering = (reader.int32() as any); + break; + + case 3: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 4: + message.connectionHops.push(reader.string()); + break; + + case 5: + message.version = reader.string(); + break; + + case 6: + message.portId = reader.string(); + break; + + case 7: + message.channelId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): IdentifiedChannel { + const message = createBaseIdentifiedChannel(); + message.state = object.state ?? 0; + message.ordering = object.ordering ?? 0; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.connectionHops = object.connectionHops?.map(e => e) || []; + message.version = object.version ?? ""; + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + return message; + } + +}; + +function createBaseCounterparty(): Counterparty { + return { + portId: "", + channelId: "" + }; +} + +export const Counterparty = { + encode(message: Counterparty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Counterparty { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCounterparty(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Counterparty { + const message = createBaseCounterparty(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? 
""; + return message; + } + +}; + +function createBasePacket(): Packet { + return { + sequence: Long.UZERO, + sourcePort: "", + sourceChannel: "", + destinationPort: "", + destinationChannel: "", + data: new Uint8Array(), + timeoutHeight: undefined, + timeoutTimestamp: Long.UZERO + }; +} + +export const Packet = { + encode(message: Packet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (message.sourcePort !== "") { + writer.uint32(18).string(message.sourcePort); + } + + if (message.sourceChannel !== "") { + writer.uint32(26).string(message.sourceChannel); + } + + if (message.destinationPort !== "") { + writer.uint32(34).string(message.destinationPort); + } + + if (message.destinationChannel !== "") { + writer.uint32(42).string(message.destinationChannel); + } + + if (message.data.length !== 0) { + writer.uint32(50).bytes(message.data); + } + + if (message.timeoutHeight !== undefined) { + Height.encode(message.timeoutHeight, writer.uint32(58).fork()).ldelim(); + } + + if (!message.timeoutTimestamp.isZero()) { + writer.uint32(64).uint64(message.timeoutTimestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Packet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacket(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.sourcePort = reader.string(); + break; + + case 3: + message.sourceChannel = reader.string(); + break; + + case 4: + message.destinationPort = reader.string(); + break; + + case 5: + message.destinationChannel = reader.string(); + break; + + case 6: + message.data = reader.bytes(); + break; + + case 7: + message.timeoutHeight = Height.decode(reader, reader.uint32()); + break; + + case 8: + message.timeoutTimestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Packet { + const message = createBasePacket(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.sourcePort = object.sourcePort ?? ""; + message.sourceChannel = object.sourceChannel ?? ""; + message.destinationPort = object.destinationPort ?? ""; + message.destinationChannel = object.destinationChannel ?? ""; + message.data = object.data ?? new Uint8Array(); + message.timeoutHeight = object.timeoutHeight !== undefined && object.timeoutHeight !== null ? Height.fromPartial(object.timeoutHeight) : undefined; + message.timeoutTimestamp = object.timeoutTimestamp !== undefined && object.timeoutTimestamp !== null ? 
Long.fromValue(object.timeoutTimestamp) : Long.UZERO; + return message; + } + +}; + +function createBasePacketState(): PacketState { + return { + portId: "", + channelId: "", + sequence: Long.UZERO, + data: new Uint8Array() + }; +} + +export const PacketState = { + encode(message: PacketState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + if (message.data.length !== 0) { + writer.uint32(34).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + case 4: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketState { + const message = createBasePacketState(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseAcknowledgement(): Acknowledgement { + return { + result: undefined, + error: undefined + }; +} + +export const Acknowledgement = { + encode(message: Acknowledgement, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.result !== undefined) { + writer.uint32(170).bytes(message.result); + } + + if (message.error !== undefined) { + writer.uint32(178).string(message.error); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Acknowledgement { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAcknowledgement(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 21: + message.result = reader.bytes(); + break; + + case 22: + message.error = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Acknowledgement { + const message = createBaseAcknowledgement(); + message.result = object.result ?? undefined; + message.error = object.error ?? undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/genesis.ts b/packages/codegen/src/ibc/core/channel/v1/genesis.ts new file mode 100644 index 00000000..4d7759f6 --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/genesis.ts @@ -0,0 +1,229 @@ +import { IdentifiedChannel, IdentifiedChannelSDKType, PacketState, PacketStateSDKType } from "./channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc channel submodule's genesis state. 
*/ + +export interface GenesisState { + channels: IdentifiedChannel[]; + acknowledgements: PacketState[]; + commitments: PacketState[]; + receipts: PacketState[]; + sendSequences: PacketSequence[]; + recvSequences: PacketSequence[]; + ackSequences: PacketSequence[]; + /** the sequence for the next generated channel identifier */ + + nextChannelSequence: Long; +} +/** GenesisState defines the ibc channel submodule's genesis state. */ + +export interface GenesisStateSDKType { + channels: IdentifiedChannelSDKType[]; + acknowledgements: PacketStateSDKType[]; + commitments: PacketStateSDKType[]; + receipts: PacketStateSDKType[]; + send_sequences: PacketSequenceSDKType[]; + recv_sequences: PacketSequenceSDKType[]; + ack_sequences: PacketSequenceSDKType[]; + next_channel_sequence: Long; +} +/** + * PacketSequence defines the genesis type necessary to retrieve and store + * next send and receive sequences. + */ + +export interface PacketSequence { + portId: string; + channelId: string; + sequence: Long; +} +/** + * PacketSequence defines the genesis type necessary to retrieve and store + * next send and receive sequences. + */ + +export interface PacketSequenceSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} + +function createBaseGenesisState(): GenesisState { + return { + channels: [], + acknowledgements: [], + commitments: [], + receipts: [], + sendSequences: [], + recvSequences: [], + ackSequences: [], + nextChannelSequence: Long.UZERO + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.channels) { + IdentifiedChannel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.acknowledgements) { + PacketState.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.commitments) { + PacketState.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.receipts) { + PacketState.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + for (const v of message.sendSequences) { + PacketSequence.encode(v!, writer.uint32(42).fork()).ldelim(); + } + + for (const v of message.recvSequences) { + PacketSequence.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + for (const v of message.ackSequences) { + PacketSequence.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (!message.nextChannelSequence.isZero()) { + writer.uint32(64).uint64(message.nextChannelSequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.channels.push(IdentifiedChannel.decode(reader, reader.uint32())); + break; + + case 2: + message.acknowledgements.push(PacketState.decode(reader, reader.uint32())); + break; + + case 3: + message.commitments.push(PacketState.decode(reader, reader.uint32())); + break; + + case 4: + message.receipts.push(PacketState.decode(reader, reader.uint32())); + break; + + case 5: + message.sendSequences.push(PacketSequence.decode(reader, reader.uint32())); + break; + + case 6: + message.recvSequences.push(PacketSequence.decode(reader, reader.uint32())); + break; + + case 7: + message.ackSequences.push(PacketSequence.decode(reader, reader.uint32())); + break; + + case 8: + message.nextChannelSequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.channels = object.channels?.map(e => IdentifiedChannel.fromPartial(e)) || []; + message.acknowledgements = object.acknowledgements?.map(e => PacketState.fromPartial(e)) || []; + message.commitments = object.commitments?.map(e => PacketState.fromPartial(e)) || []; + message.receipts = object.receipts?.map(e => PacketState.fromPartial(e)) || []; + message.sendSequences = object.sendSequences?.map(e => PacketSequence.fromPartial(e)) || []; + message.recvSequences = object.recvSequences?.map(e => PacketSequence.fromPartial(e)) || []; + message.ackSequences = object.ackSequences?.map(e => PacketSequence.fromPartial(e)) || []; + message.nextChannelSequence = object.nextChannelSequence !== undefined && object.nextChannelSequence !== null ? Long.fromValue(object.nextChannelSequence) : Long.UZERO; + return message; + } + +}; + +function createBasePacketSequence(): PacketSequence { + return { + portId: "", + channelId: "", + sequence: Long.UZERO + }; +} + +export const PacketSequence = { + encode(message: PacketSequence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketSequence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketSequence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketSequence { + const message = createBasePacketSequence(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? 
Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/query.lcd.ts b/packages/codegen/src/ibc/core/channel/v1/query.lcd.ts new file mode 100644 index 00000000..eb1215bf --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/query.lcd.ts @@ -0,0 +1,165 @@ +import { setPaginationParams } from "../../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryChannelRequest, QueryChannelResponseSDKType, QueryChannelsRequest, QueryChannelsResponseSDKType, QueryConnectionChannelsRequest, QueryConnectionChannelsResponseSDKType, QueryChannelClientStateRequest, QueryChannelClientStateResponseSDKType, QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponseSDKType, QueryPacketCommitmentRequest, QueryPacketCommitmentResponseSDKType, QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponseSDKType, QueryPacketReceiptRequest, QueryPacketReceiptResponseSDKType, QueryPacketAcknowledgementRequest, QueryPacketAcknowledgementResponseSDKType, QueryPacketAcknowledgementsRequest, QueryPacketAcknowledgementsResponseSDKType, QueryUnreceivedPacketsRequest, QueryUnreceivedPacketsResponseSDKType, QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponseSDKType, QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.channel = this.channel.bind(this); + this.channels = this.channels.bind(this); + this.connectionChannels = this.connectionChannels.bind(this); + this.channelClientState = this.channelClientState.bind(this); + this.channelConsensusState = this.channelConsensusState.bind(this); + this.packetCommitment = this.packetCommitment.bind(this); + this.packetCommitments = this.packetCommitments.bind(this); + this.packetReceipt = this.packetReceipt.bind(this); + this.packetAcknowledgement = this.packetAcknowledgement.bind(this); + this.packetAcknowledgements = this.packetAcknowledgements.bind(this); + this.unreceivedPackets = this.unreceivedPackets.bind(this); + this.unreceivedAcks = this.unreceivedAcks.bind(this); + this.nextSequenceReceive = this.nextSequenceReceive.bind(this); + } + /* Channel queries an IBC Channel. */ + + + async channel(params: QueryChannelRequest): Promise { + const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}`; + return await this.req.get(endpoint); + } + /* Channels queries all the IBC channels of a chain. */ + + + async channels(params: QueryChannelsRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/core/channel/v1/channels`; + return await this.req.get(endpoint, options); + } + /* ConnectionChannels queries all the channels associated with a connection + end. */ + + + async connectionChannels(params: QueryConnectionChannelsRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/core/channel/v1/connections/${params.connection}/channels`; + return await this.req.get(endpoint, options); + } + /* ChannelClientState queries for the client state for the channel associated + with the provided channel identifiers. 
*/
+
+
+  async channelClientState(params: QueryChannelClientStateRequest): Promise<QueryChannelClientStateResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/client_state`;
+    return await this.req.get<QueryChannelClientStateResponseSDKType>(endpoint);
+  }
+  /* ChannelConsensusState queries for the consensus state for the channel
+  associated with the provided channel identifiers. */
+
+
+  async channelConsensusState(params: QueryChannelConsensusStateRequest): Promise<QueryChannelConsensusStateResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/consensus_state/revision/${params.revisionNumber}/height/${params.revisionHeight}`;
+    return await this.req.get<QueryChannelConsensusStateResponseSDKType>(endpoint);
+  }
+  /* PacketCommitment queries a stored packet commitment hash. */
+
+
+  async packetCommitment(params: QueryPacketCommitmentRequest): Promise<QueryPacketCommitmentResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_commitments/${params.sequence}`;
+    return await this.req.get<QueryPacketCommitmentResponseSDKType>(endpoint);
+  }
+  /* PacketCommitments returns all the packet commitments hashes associated
+  with a channel. */
+
+
+  async packetCommitments(params: QueryPacketCommitmentsRequest): Promise<QueryPacketCommitmentsResponseSDKType> {
+    const options: any = {
+      params: {}
+    };
+
+    if (typeof params?.pagination !== "undefined") {
+      setPaginationParams(options, params.pagination);
+    }
+
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_commitments`;
+    return await this.req.get<QueryPacketCommitmentsResponseSDKType>(endpoint, options);
+  }
+  /* PacketReceipt queries if a given packet sequence has been received on the
+  queried chain */
+
+
+  async packetReceipt(params: QueryPacketReceiptRequest): Promise<QueryPacketReceiptResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_receipts/${params.sequence}`;
+    return await this.req.get<QueryPacketReceiptResponseSDKType>(endpoint);
+  }
+  /* PacketAcknowledgement queries a stored packet acknowledgement hash. */
+
+
+  async packetAcknowledgement(params: QueryPacketAcknowledgementRequest): Promise<QueryPacketAcknowledgementResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_acks/${params.sequence}`;
+    return await this.req.get<QueryPacketAcknowledgementResponseSDKType>(endpoint);
+  }
+  /* PacketAcknowledgements returns all the packet acknowledgements associated
+  with a channel. */
+
+
+  async packetAcknowledgements(params: QueryPacketAcknowledgementsRequest): Promise<QueryPacketAcknowledgementsResponseSDKType> {
+    const options: any = {
+      params: {}
+    };
+
+    if (typeof params?.pagination !== "undefined") {
+      setPaginationParams(options, params.pagination);
+    }
+
+    if (typeof params?.packetCommitmentSequences !== "undefined") {
+      options.params.packet_commitment_sequences = params.packetCommitmentSequences;
+    }
+
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_acknowledgements`;
+    return await this.req.get<QueryPacketAcknowledgementsResponseSDKType>(endpoint, options);
+  }
+  /* UnreceivedPackets returns all the unreceived IBC packets associated with a
+  channel and sequences. */
+
+
+  async unreceivedPackets(params: QueryUnreceivedPacketsRequest): Promise<QueryUnreceivedPacketsResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_commitments/${params.packetCommitmentSequences}/unreceived_packets`;
+    return await this.req.get<QueryUnreceivedPacketsResponseSDKType>(endpoint);
+  }
+  /* UnreceivedAcks returns all the unreceived IBC acknowledgements associated
+  with a channel and sequences. */
+
+
+  async unreceivedAcks(params: QueryUnreceivedAcksRequest): Promise<QueryUnreceivedAcksResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/packet_commitments/${params.packetAckSequences}/unreceived_acks`;
+    return await this.req.get<QueryUnreceivedAcksResponseSDKType>(endpoint);
+  }
+  /* NextSequenceReceive returns the next receive sequence for a given channel. */
+
+
+  async nextSequenceReceive(params: QueryNextSequenceReceiveRequest): Promise<QueryNextSequenceReceiveResponseSDKType> {
+    const endpoint = `ibc/core/channel/v1/channels/${params.channelId}/ports/${params.portId}/next_sequence`;
+    return await this.req.get<QueryNextSequenceReceiveResponseSDKType>(endpoint);
+  }
+
+}
\ No newline at end of file
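The generated LCD client above is driven entirely through the injected requestClient, and the RPC client in the next file follows the same pattern over protobuf. The following is a minimal usage sketch, not part of the generated output; it assumes @osmonauts/lcd exposes an LCDClient constructed with a restEndpoint option, and the endpoint URL and channel identifiers are placeholders.

    import { LCDClient } from "@osmonauts/lcd";
    import { LCDQueryClient } from "./query.lcd";

    async function main() {
      // Assumed constructor shape for @osmonauts/lcd; the option name may differ.
      const requestClient = new LCDClient({ restEndpoint: "https://rest.example.com" });
      const client = new LCDQueryClient({ requestClient });

      // Placeholder identifiers for an ICS-20 transfer channel.
      const res = await client.channel({ channelId: "channel-0", portId: "transfer" });
      console.log(res.channel?.state);
    }

    main();
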
diff --git a/packages/codegen/src/ibc/core/channel/v1/query.rpc.Query.ts b/packages/codegen/src/ibc/core/channel/v1/query.rpc.Query.ts
new file mode 100644
index 00000000..4fbeb5b7
--- /dev/null
+++ b/packages/codegen/src/ibc/core/channel/v1/query.rpc.Query.ts
@@ -0,0 +1,229 @@
+import { Rpc } from "../../../../helpers";
+import * as _m0 from "protobufjs/minimal";
+import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate";
+import { QueryChannelRequest, QueryChannelResponse, QueryChannelsRequest, QueryChannelsResponse, QueryConnectionChannelsRequest, QueryConnectionChannelsResponse, QueryChannelClientStateRequest, QueryChannelClientStateResponse, QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponse, QueryPacketCommitmentRequest, QueryPacketCommitmentResponse, QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponse, QueryPacketReceiptRequest, QueryPacketReceiptResponse, QueryPacketAcknowledgementRequest, QueryPacketAcknowledgementResponse, QueryPacketAcknowledgementsRequest, QueryPacketAcknowledgementsResponse, QueryUnreceivedPacketsRequest, QueryUnreceivedPacketsResponse, QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponse, QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponse } from "./query";
+/** Query provides defines the gRPC querier service */
+
+export interface Query {
+  /** Channel queries an IBC Channel. */
+  channel(request: QueryChannelRequest): Promise<QueryChannelResponse>;
+  /** Channels queries all the IBC channels of a chain. */
+
+  channels(request?: QueryChannelsRequest): Promise<QueryChannelsResponse>;
+  /**
+   * ConnectionChannels queries all the channels associated with a connection
+   * end.
+   */
+
+  connectionChannels(request: QueryConnectionChannelsRequest): Promise<QueryConnectionChannelsResponse>;
+  /**
+   * ChannelClientState queries for the client state for the channel associated
+   * with the provided channel identifiers.
+   */
+
+  channelClientState(request: QueryChannelClientStateRequest): Promise<QueryChannelClientStateResponse>;
+  /**
+   * ChannelConsensusState queries for the consensus state for the channel
+   * associated with the provided channel identifiers.
+   */
+
+  channelConsensusState(request: QueryChannelConsensusStateRequest): Promise<QueryChannelConsensusStateResponse>;
+  /** PacketCommitment queries a stored packet commitment hash. */
+
+  packetCommitment(request: QueryPacketCommitmentRequest): Promise<QueryPacketCommitmentResponse>;
+  /**
+   * PacketCommitments returns all the packet commitments hashes associated
+   * with a channel.
+   */
+
+  packetCommitments(request: QueryPacketCommitmentsRequest): Promise<QueryPacketCommitmentsResponse>;
+  /**
+   * PacketReceipt queries if a given packet sequence has been received on the
+   * queried chain
+   */
+
+  packetReceipt(request: QueryPacketReceiptRequest): Promise<QueryPacketReceiptResponse>;
+  /** PacketAcknowledgement queries a stored packet acknowledgement hash. */
+
+  packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise<QueryPacketAcknowledgementResponse>;
+  /**
+   * PacketAcknowledgements returns all the packet acknowledgements associated
+   * with a channel.
+   */
+
+  packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise<QueryPacketAcknowledgementsResponse>;
+  /**
+   * UnreceivedPackets returns all the unreceived IBC packets associated with a
+   * channel and sequences.
+   */
+
+  unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise<QueryUnreceivedPacketsResponse>;
+  /**
+   * UnreceivedAcks returns all the unreceived IBC acknowledgements associated
+   * with a channel and sequences.
+   */
+
+  unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise<QueryUnreceivedAcksResponse>;
+  /** NextSequenceReceive returns the next receive sequence for a given channel. */
+
+  nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise<QueryNextSequenceReceiveResponse>;
+}
+export class QueryClientImpl implements Query {
+  private readonly rpc: Rpc;
+
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.channel = this.channel.bind(this);
+    this.channels = this.channels.bind(this);
+    this.connectionChannels = this.connectionChannels.bind(this);
+    this.channelClientState = this.channelClientState.bind(this);
+    this.channelConsensusState = this.channelConsensusState.bind(this);
+    this.packetCommitment = this.packetCommitment.bind(this);
+    this.packetCommitments = this.packetCommitments.bind(this);
+    this.packetReceipt = this.packetReceipt.bind(this);
+    this.packetAcknowledgement = this.packetAcknowledgement.bind(this);
+    this.packetAcknowledgements = this.packetAcknowledgements.bind(this);
+    this.unreceivedPackets = this.unreceivedPackets.bind(this);
+    this.unreceivedAcks = this.unreceivedAcks.bind(this);
+    this.nextSequenceReceive = this.nextSequenceReceive.bind(this);
+  }
+
+  channel(request: QueryChannelRequest): Promise<QueryChannelResponse> {
+    const data = QueryChannelRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "Channel", data);
+    return promise.then(data => QueryChannelResponse.decode(new _m0.Reader(data)));
+  }
+
+  channels(request: QueryChannelsRequest = {
+    pagination: undefined
+  }): Promise<QueryChannelsResponse> {
+    const data = QueryChannelsRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "Channels", data);
+    return promise.then(data => QueryChannelsResponse.decode(new _m0.Reader(data)));
+  }
+
+  connectionChannels(request: QueryConnectionChannelsRequest): Promise<QueryConnectionChannelsResponse> {
+    const data = QueryConnectionChannelsRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "ConnectionChannels", data);
+    return promise.then(data => QueryConnectionChannelsResponse.decode(new _m0.Reader(data)));
+  }
+
+  channelClientState(request: QueryChannelClientStateRequest): Promise<QueryChannelClientStateResponse> {
+    const data = QueryChannelClientStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "ChannelClientState", data);
+    return promise.then(data => QueryChannelClientStateResponse.decode(new _m0.Reader(data)));
+  }
+
+  channelConsensusState(request: QueryChannelConsensusStateRequest): Promise<QueryChannelConsensusStateResponse> {
+    const data = QueryChannelConsensusStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "ChannelConsensusState", data);
+    return promise.then(data => QueryChannelConsensusStateResponse.decode(new _m0.Reader(data)));
+  }
+
+  packetCommitment(request: QueryPacketCommitmentRequest): Promise<QueryPacketCommitmentResponse> {
+    const data = QueryPacketCommitmentRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.channel.v1.Query", "PacketCommitment", data);
+    return promise.then(data => QueryPacketCommitmentResponse.decode(new _m0.Reader(data)));
+  }
+
+  packetCommitments(request:
QueryPacketCommitmentsRequest): Promise { + const data = QueryPacketCommitmentsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "PacketCommitments", data); + return promise.then(data => QueryPacketCommitmentsResponse.decode(new _m0.Reader(data))); + } + + packetReceipt(request: QueryPacketReceiptRequest): Promise { + const data = QueryPacketReceiptRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "PacketReceipt", data); + return promise.then(data => QueryPacketReceiptResponse.decode(new _m0.Reader(data))); + } + + packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise { + const data = QueryPacketAcknowledgementRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "PacketAcknowledgement", data); + return promise.then(data => QueryPacketAcknowledgementResponse.decode(new _m0.Reader(data))); + } + + packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise { + const data = QueryPacketAcknowledgementsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "PacketAcknowledgements", data); + return promise.then(data => QueryPacketAcknowledgementsResponse.decode(new _m0.Reader(data))); + } + + unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise { + const data = QueryUnreceivedPacketsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "UnreceivedPackets", data); + return promise.then(data => QueryUnreceivedPacketsResponse.decode(new _m0.Reader(data))); + } + + unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise { + const data = QueryUnreceivedAcksRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "UnreceivedAcks", data); + return promise.then(data => QueryUnreceivedAcksResponse.decode(new _m0.Reader(data))); + } + + nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise { + const data = QueryNextSequenceReceiveRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Query", "NextSequenceReceive", data); + return promise.then(data => QueryNextSequenceReceiveResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + channel(request: QueryChannelRequest): Promise { + return queryService.channel(request); + }, + + channels(request?: QueryChannelsRequest): Promise { + return queryService.channels(request); + }, + + connectionChannels(request: QueryConnectionChannelsRequest): Promise { + return queryService.connectionChannels(request); + }, + + channelClientState(request: QueryChannelClientStateRequest): Promise { + return queryService.channelClientState(request); + }, + + channelConsensusState(request: QueryChannelConsensusStateRequest): Promise { + return queryService.channelConsensusState(request); + }, + + packetCommitment(request: QueryPacketCommitmentRequest): Promise { + return queryService.packetCommitment(request); + }, + + packetCommitments(request: QueryPacketCommitmentsRequest): Promise { + return queryService.packetCommitments(request); + }, + + packetReceipt(request: QueryPacketReceiptRequest): Promise { + return queryService.packetReceipt(request); + }, + + packetAcknowledgement(request: QueryPacketAcknowledgementRequest): Promise { + return 
queryService.packetAcknowledgement(request); + }, + + packetAcknowledgements(request: QueryPacketAcknowledgementsRequest): Promise { + return queryService.packetAcknowledgements(request); + }, + + unreceivedPackets(request: QueryUnreceivedPacketsRequest): Promise { + return queryService.unreceivedPackets(request); + }, + + unreceivedAcks(request: QueryUnreceivedAcksRequest): Promise { + return queryService.unreceivedAcks(request); + }, + + nextSequenceReceive(request: QueryNextSequenceReceiveRequest): Promise { + return queryService.nextSequenceReceive(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/query.ts b/packages/codegen/src/ibc/core/channel/v1/query.ts new file mode 100644 index 00000000..c63fc1d4 --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/query.ts @@ -0,0 +1,2326 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { Channel, ChannelSDKType, IdentifiedChannel, IdentifiedChannelSDKType, PacketState, PacketStateSDKType } from "./channel"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType } from "../../client/v1/client"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** QueryChannelRequest is the request type for the Query/Channel RPC method */ + +export interface QueryChannelRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; +} +/** QueryChannelRequest is the request type for the Query/Channel RPC method */ + +export interface QueryChannelRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QueryChannelResponse is the response type for the Query/Channel RPC method. + * Besides the Channel end, it includes a proof and the height from which the + * proof was retrieved. + */ + +export interface QueryChannelResponse { + /** channel associated with the request identifiers */ + channel?: Channel; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryChannelResponse is the response type for the Query/Channel RPC method. + * Besides the Channel end, it includes a proof and the height from which the + * proof was retrieved. + */ + +export interface QueryChannelResponseSDKType { + channel?: ChannelSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** QueryChannelsRequest is the request type for the Query/Channels RPC method */ + +export interface QueryChannelsRequest { + /** pagination request */ + pagination?: PageRequest; +} +/** QueryChannelsRequest is the request type for the Query/Channels RPC method */ + +export interface QueryChannelsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** QueryChannelsResponse is the response type for the Query/Channels RPC method. */ + +export interface QueryChannelsResponse { + /** list of stored channels of the chain. */ + channels: IdentifiedChannel[]; + /** pagination response */ + + pagination?: PageResponse; + /** query block height */ + + height?: Height; +} +/** QueryChannelsResponse is the response type for the Query/Channels RPC method. 
*/ + +export interface QueryChannelsResponseSDKType { + channels: IdentifiedChannelSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryConnectionChannelsRequest is the request type for the + * Query/QueryConnectionChannels RPC method + */ + +export interface QueryConnectionChannelsRequest { + /** connection unique identifier */ + connection: string; + /** pagination request */ + + pagination?: PageRequest; +} +/** + * QueryConnectionChannelsRequest is the request type for the + * Query/QueryConnectionChannels RPC method + */ + +export interface QueryConnectionChannelsRequestSDKType { + connection: string; + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionChannelsResponse is the Response type for the + * Query/QueryConnectionChannels RPC method + */ + +export interface QueryConnectionChannelsResponse { + /** list of channels associated with a connection. */ + channels: IdentifiedChannel[]; + /** pagination response */ + + pagination?: PageResponse; + /** query block height */ + + height?: Height; +} +/** + * QueryConnectionChannelsResponse is the Response type for the + * Query/QueryConnectionChannels RPC method + */ + +export interface QueryConnectionChannelsResponseSDKType { + channels: IdentifiedChannelSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryChannelClientStateRequest is the request type for the Query/ClientState + * RPC method + */ + +export interface QueryChannelClientStateRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; +} +/** + * QueryChannelClientStateRequest is the request type for the Query/ClientState + * RPC method + */ + +export interface QueryChannelClientStateRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ + +export interface QueryChannelClientStateResponse { + /** client state associated with the channel */ + identifiedClientState?: IdentifiedClientState; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ + +export interface QueryChannelClientStateResponseSDKType { + identified_client_state?: IdentifiedClientStateSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryChannelConsensusStateRequest is the request type for the + * Query/ConsensusState RPC method + */ + +export interface QueryChannelConsensusStateRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** revision number of the consensus state */ + + revisionNumber: Long; + /** revision height of the consensus state */ + + revisionHeight: Long; +} +/** + * QueryChannelConsensusStateRequest is the request type for the + * Query/ConsensusState RPC method + */ + +export interface QueryChannelConsensusStateRequestSDKType { + port_id: string; + channel_id: string; + revision_number: Long; + revision_height: Long; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ + +export interface QueryChannelConsensusStateResponse { + /** consensus state associated with the channel */ + consensusState?: Any; + /** client ID associated with the consensus state */ + + clientId: 
string; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryChannelClientStateResponse is the Response type for the + * Query/QueryChannelClientState RPC method + */ + +export interface QueryChannelConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + client_id: string; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketCommitmentRequest is the request type for the + * Query/PacketCommitment RPC method + */ + +export interface QueryPacketCommitmentRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** packet sequence */ + + sequence: Long; +} +/** + * QueryPacketCommitmentRequest is the request type for the + * Query/PacketCommitment RPC method + */ + +export interface QueryPacketCommitmentRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketCommitmentResponse defines the client query response for a packet + * which also includes a proof and the height from which the proof was + * retrieved + */ + +export interface QueryPacketCommitmentResponse { + /** packet associated with the request fields */ + commitment: Uint8Array; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryPacketCommitmentResponse defines the client query response for a packet + * which also includes a proof and the height from which the proof was + * retrieved + */ + +export interface QueryPacketCommitmentResponseSDKType { + commitment: Uint8Array; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketCommitmentsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketCommitmentsRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** pagination request */ + + pagination?: PageRequest; +} +/** + * QueryPacketCommitmentsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketCommitmentsRequestSDKType { + port_id: string; + channel_id: string; + pagination?: PageRequestSDKType; +} +/** + * QueryPacketCommitmentsResponse is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketCommitmentsResponse { + commitments: PacketState[]; + /** pagination response */ + + pagination?: PageResponse; + /** query block height */ + + height?: Height; +} +/** + * QueryPacketCommitmentsResponse is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketCommitmentsResponseSDKType { + commitments: PacketStateSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryPacketReceiptRequest is the request type for the + * Query/PacketReceipt RPC method + */ + +export interface QueryPacketReceiptRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** packet sequence */ + + sequence: Long; +} +/** + * QueryPacketReceiptRequest is the request type for the + * Query/PacketReceipt RPC method + */ + +export interface QueryPacketReceiptRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketReceiptResponse defines the client query response for a packet + * receipt which 
also includes a proof, and the height from which the proof was + * retrieved + */ + +export interface QueryPacketReceiptResponse { + /** success flag for if receipt exists */ + received: boolean; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryPacketReceiptResponse defines the client query response for a packet + * receipt which also includes a proof, and the height from which the proof was + * retrieved + */ + +export interface QueryPacketReceiptResponseSDKType { + received: boolean; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketAcknowledgementRequest is the request type for the + * Query/PacketAcknowledgement RPC method + */ + +export interface QueryPacketAcknowledgementRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** packet sequence */ + + sequence: Long; +} +/** + * QueryPacketAcknowledgementRequest is the request type for the + * Query/PacketAcknowledgement RPC method + */ + +export interface QueryPacketAcknowledgementRequestSDKType { + port_id: string; + channel_id: string; + sequence: Long; +} +/** + * QueryPacketAcknowledgementResponse defines the client query response for a + * packet which also includes a proof and the height from which the + * proof was retrieved + */ + +export interface QueryPacketAcknowledgementResponse { + /** packet associated with the request fields */ + acknowledgement: Uint8Array; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryPacketAcknowledgementResponse defines the client query response for a + * packet which also includes a proof and the height from which the + * proof was retrieved + */ + +export interface QueryPacketAcknowledgementResponseSDKType { + acknowledgement: Uint8Array; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryPacketAcknowledgementsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketAcknowledgementsRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** pagination request */ + + pagination?: PageRequest; + /** list of packet sequences */ + + packetCommitmentSequences: Long[]; +} +/** + * QueryPacketAcknowledgementsRequest is the request type for the + * Query/QueryPacketCommitments RPC method + */ + +export interface QueryPacketAcknowledgementsRequestSDKType { + port_id: string; + channel_id: string; + pagination?: PageRequestSDKType; + packet_commitment_sequences: Long[]; +} +/** + * QueryPacketAcknowledgemetsResponse is the request type for the + * Query/QueryPacketAcknowledgements RPC method + */ + +export interface QueryPacketAcknowledgementsResponse { + acknowledgements: PacketState[]; + /** pagination response */ + + pagination?: PageResponse; + /** query block height */ + + height?: Height; +} +/** + * QueryPacketAcknowledgemetsResponse is the request type for the + * Query/QueryPacketAcknowledgements RPC method + */ + +export interface QueryPacketAcknowledgementsResponseSDKType { + acknowledgements: PacketStateSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryUnreceivedPacketsRequest is the request type for the + * Query/UnreceivedPackets RPC method + */ + +export interface QueryUnreceivedPacketsRequest { + /** port unique 
identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** list of packet sequences */ + + packetCommitmentSequences: Long[]; +} +/** + * QueryUnreceivedPacketsRequest is the request type for the + * Query/UnreceivedPackets RPC method + */ + +export interface QueryUnreceivedPacketsRequestSDKType { + port_id: string; + channel_id: string; + packet_commitment_sequences: Long[]; +} +/** + * QueryUnreceivedPacketsResponse is the response type for the + * Query/UnreceivedPacketCommitments RPC method + */ + +export interface QueryUnreceivedPacketsResponse { + /** list of unreceived packet sequences */ + sequences: Long[]; + /** query block height */ + + height?: Height; +} +/** + * QueryUnreceivedPacketsResponse is the response type for the + * Query/UnreceivedPacketCommitments RPC method + */ + +export interface QueryUnreceivedPacketsResponseSDKType { + sequences: Long[]; + height?: HeightSDKType; +} +/** + * QueryUnreceivedAcks is the request type for the + * Query/UnreceivedAcks RPC method + */ + +export interface QueryUnreceivedAcksRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; + /** list of acknowledgement sequences */ + + packetAckSequences: Long[]; +} +/** + * QueryUnreceivedAcks is the request type for the + * Query/UnreceivedAcks RPC method + */ + +export interface QueryUnreceivedAcksRequestSDKType { + port_id: string; + channel_id: string; + packet_ack_sequences: Long[]; +} +/** + * QueryUnreceivedAcksResponse is the response type for the + * Query/UnreceivedAcks RPC method + */ + +export interface QueryUnreceivedAcksResponse { + /** list of unreceived acknowledgement sequences */ + sequences: Long[]; + /** query block height */ + + height?: Height; +} +/** + * QueryUnreceivedAcksResponse is the response type for the + * Query/UnreceivedAcks RPC method + */ + +export interface QueryUnreceivedAcksResponseSDKType { + sequences: Long[]; + height?: HeightSDKType; +} +/** + * QueryNextSequenceReceiveRequest is the request type for the + * Query/QueryNextSequenceReceiveRequest RPC method + */ + +export interface QueryNextSequenceReceiveRequest { + /** port unique identifier */ + portId: string; + /** channel unique identifier */ + + channelId: string; +} +/** + * QueryNextSequenceReceiveRequest is the request type for the + * Query/QueryNextSequenceReceiveRequest RPC method + */ + +export interface QueryNextSequenceReceiveRequestSDKType { + port_id: string; + channel_id: string; +} +/** + * QuerySequenceResponse is the request type for the + * Query/QueryNextSequenceReceiveResponse RPC method + */ + +export interface QueryNextSequenceReceiveResponse { + /** next sequence receive number */ + nextSequenceReceive: Long; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QuerySequenceResponse is the request type for the + * Query/QueryNextSequenceReceiveResponse RPC method + */ + +export interface QueryNextSequenceReceiveResponseSDKType { + next_sequence_receive: Long; + proof: Uint8Array; + proof_height?: HeightSDKType; +} + +function createBaseQueryChannelRequest(): QueryChannelRequest { + return { + portId: "", + channelId: "" + }; +} + +export const QueryChannelRequest = { + encode(message: QueryChannelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + 
writer.uint32(18).string(message.channelId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelRequest { + const message = createBaseQueryChannelRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + return message; + } + +}; + +function createBaseQueryChannelResponse(): QueryChannelResponse { + return { + channel: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryChannelResponse = { + encode(message: QueryChannelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.channel !== undefined) { + Channel.encode(message.channel, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.channel = Channel.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelResponse { + const message = createBaseQueryChannelResponse(); + message.channel = object.channel !== undefined && object.channel !== null ? Channel.fromPartial(object.channel) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryChannelsRequest(): QueryChannelsRequest { + return { + pagination: undefined + }; +} + +export const QueryChannelsRequest = { + encode(message: QueryChannelsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryChannelsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelsRequest { + const message = createBaseQueryChannelsRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryChannelsResponse(): QueryChannelsResponse { + return { + channels: [], + pagination: undefined, + height: undefined + }; +} + +export const QueryChannelsResponse = { + encode(message: QueryChannelsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.channels) { + IdentifiedChannel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.channels.push(IdentifiedChannel.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + case 3: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelsResponse { + const message = createBaseQueryChannelsResponse(); + message.channels = object.channels?.map(e => IdentifiedChannel.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionChannelsRequest(): QueryConnectionChannelsRequest { + return { + connection: "", + pagination: undefined + }; +} + +export const QueryConnectionChannelsRequest = { + encode(message: QueryConnectionChannelsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connection !== "") { + writer.uint32(10).string(message.connection); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionChannelsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryConnectionChannelsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connection = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionChannelsRequest { + const message = createBaseQueryConnectionChannelsRequest(); + message.connection = object.connection ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionChannelsResponse(): QueryConnectionChannelsResponse { + return { + channels: [], + pagination: undefined, + height: undefined + }; +} + +export const QueryConnectionChannelsResponse = { + encode(message: QueryConnectionChannelsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.channels) { + IdentifiedChannel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionChannelsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionChannelsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.channels.push(IdentifiedChannel.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + case 3: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionChannelsResponse { + const message = createBaseQueryConnectionChannelsResponse(); + message.channels = object.channels?.map(e => IdentifiedChannel.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryChannelClientStateRequest(): QueryChannelClientStateRequest { + return { + portId: "", + channelId: "" + }; +} + +export const QueryChannelClientStateRequest = { + encode(message: QueryChannelClientStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelClientStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryChannelClientStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelClientStateRequest { + const message = createBaseQueryChannelClientStateRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + return message; + } + +}; + +function createBaseQueryChannelClientStateResponse(): QueryChannelClientStateResponse { + return { + identifiedClientState: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryChannelClientStateResponse = { + encode(message: QueryChannelClientStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.identifiedClientState !== undefined) { + IdentifiedClientState.encode(message.identifiedClientState, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelClientStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelClientStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.identifiedClientState = IdentifiedClientState.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelClientStateResponse { + const message = createBaseQueryChannelClientStateResponse(); + message.identifiedClientState = object.identifiedClientState !== undefined && object.identifiedClientState !== null ? IdentifiedClientState.fromPartial(object.identifiedClientState) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryChannelConsensusStateRequest(): QueryChannelConsensusStateRequest { + return { + portId: "", + channelId: "", + revisionNumber: Long.UZERO, + revisionHeight: Long.UZERO + }; +} + +export const QueryChannelConsensusStateRequest = { + encode(message: QueryChannelConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.revisionNumber.isZero()) { + writer.uint32(24).uint64(message.revisionNumber); + } + + if (!message.revisionHeight.isZero()) { + writer.uint32(32).uint64(message.revisionHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelConsensusStateRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelConsensusStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.revisionNumber = (reader.uint64() as Long); + break; + + case 4: + message.revisionHeight = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelConsensusStateRequest { + const message = createBaseQueryChannelConsensusStateRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.revisionNumber = object.revisionNumber !== undefined && object.revisionNumber !== null ? Long.fromValue(object.revisionNumber) : Long.UZERO; + message.revisionHeight = object.revisionHeight !== undefined && object.revisionHeight !== null ? Long.fromValue(object.revisionHeight) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryChannelConsensusStateResponse(): QueryChannelConsensusStateResponse { + return { + consensusState: undefined, + clientId: "", + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryChannelConsensusStateResponse = { + encode(message: QueryChannelConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(10).fork()).ldelim(); + } + + if (message.clientId !== "") { + writer.uint32(18).string(message.clientId); + } + + if (message.proof.length !== 0) { + writer.uint32(26).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryChannelConsensusStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChannelConsensusStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.clientId = reader.string(); + break; + + case 3: + message.proof = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryChannelConsensusStateResponse { + const message = createBaseQueryChannelConsensusStateResponse(); + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + message.clientId = object.clientId ?? ""; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? 
Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryPacketCommitmentRequest(): QueryPacketCommitmentRequest { + return { + portId: "", + channelId: "", + sequence: Long.UZERO + }; +} + +export const QueryPacketCommitmentRequest = { + encode(message: QueryPacketCommitmentRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketCommitmentRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketCommitmentRequest { + const message = createBaseQueryPacketCommitmentRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryPacketCommitmentResponse(): QueryPacketCommitmentResponse { + return { + commitment: new Uint8Array(), + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryPacketCommitmentResponse = { + encode(message: QueryPacketCommitmentResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.commitment.length !== 0) { + writer.uint32(10).bytes(message.commitment); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketCommitmentResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.commitment = reader.bytes(); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketCommitmentResponse { + const message = createBaseQueryPacketCommitmentResponse(); + message.commitment = object.commitment ?? new Uint8Array(); + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? 
Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryPacketCommitmentsRequest(): QueryPacketCommitmentsRequest { + return { + portId: "", + channelId: "", + pagination: undefined + }; +} + +export const QueryPacketCommitmentsRequest = { + encode(message: QueryPacketCommitmentsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketCommitmentsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketCommitmentsRequest { + const message = createBaseQueryPacketCommitmentsRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryPacketCommitmentsResponse(): QueryPacketCommitmentsResponse { + return { + commitments: [], + pagination: undefined, + height: undefined + }; +} + +export const QueryPacketCommitmentsResponse = { + encode(message: QueryPacketCommitmentsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.commitments) { + PacketState.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketCommitmentsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketCommitmentsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.commitments.push(PacketState.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + case 3: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketCommitmentsResponse { + const message = createBaseQueryPacketCommitmentsResponse(); + message.commitments = object.commitments?.map(e => PacketState.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageResponse.fromPartial(object.pagination) : undefined; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryPacketReceiptRequest(): QueryPacketReceiptRequest { + return { + portId: "", + channelId: "", + sequence: Long.UZERO + }; +} + +export const QueryPacketReceiptRequest = { + encode(message: QueryPacketReceiptRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketReceiptRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketReceiptRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketReceiptRequest { + const message = createBaseQueryPacketReceiptRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryPacketReceiptResponse(): QueryPacketReceiptResponse { + return { + received: false, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryPacketReceiptResponse = { + encode(message: QueryPacketReceiptResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.received === true) { + writer.uint32(16).bool(message.received); + } + + if (message.proof.length !== 0) { + writer.uint32(26).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketReceiptResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketReceiptResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.received = reader.bool(); + break; + + case 3: + message.proof = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketReceiptResponse { + const message = createBaseQueryPacketReceiptResponse(); + message.received = object.received ?? false; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? 
Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryPacketAcknowledgementRequest(): QueryPacketAcknowledgementRequest { + return { + portId: "", + channelId: "", + sequence: Long.UZERO + }; +} + +export const QueryPacketAcknowledgementRequest = { + encode(message: QueryPacketAcknowledgementRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (!message.sequence.isZero()) { + writer.uint32(24).uint64(message.sequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketAcknowledgementRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.sequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketAcknowledgementRequest { + const message = createBaseQueryPacketAcknowledgementRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryPacketAcknowledgementResponse(): QueryPacketAcknowledgementResponse { + return { + acknowledgement: new Uint8Array(), + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryPacketAcknowledgementResponse = { + encode(message: QueryPacketAcknowledgementResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.acknowledgement.length !== 0) { + writer.uint32(10).bytes(message.acknowledgement); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketAcknowledgementResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.acknowledgement = reader.bytes(); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketAcknowledgementResponse { + const message = createBaseQueryPacketAcknowledgementResponse(); + message.acknowledgement = object.acknowledgement ?? new Uint8Array(); + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? 
Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryPacketAcknowledgementsRequest(): QueryPacketAcknowledgementsRequest { + return { + portId: "", + channelId: "", + pagination: undefined, + packetCommitmentSequences: [] + }; +} + +export const QueryPacketAcknowledgementsRequest = { + encode(message: QueryPacketAcknowledgementsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + + writer.uint32(34).fork(); + + for (const v of message.packetCommitmentSequences) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryPacketAcknowledgementsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + case 4: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.packetCommitmentSequences.push((reader.uint64() as Long)); + } + } else { + message.packetCommitmentSequences.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketAcknowledgementsRequest { + const message = createBaseQueryPacketAcknowledgementsRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + message.packetCommitmentSequences = object.packetCommitmentSequences?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseQueryPacketAcknowledgementsResponse(): QueryPacketAcknowledgementsResponse { + return { + acknowledgements: [], + pagination: undefined, + height: undefined + }; +} + +export const QueryPacketAcknowledgementsResponse = { + encode(message: QueryPacketAcknowledgementsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.acknowledgements) { + PacketState.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryPacketAcknowledgementsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryPacketAcknowledgementsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.acknowledgements.push(PacketState.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + case 3: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryPacketAcknowledgementsResponse { + const message = createBaseQueryPacketAcknowledgementsResponse(); + message.acknowledgements = object.acknowledgements?.map(e => PacketState.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryUnreceivedPacketsRequest(): QueryUnreceivedPacketsRequest { + return { + portId: "", + channelId: "", + packetCommitmentSequences: [] + }; +} + +export const QueryUnreceivedPacketsRequest = { + encode(message: QueryUnreceivedPacketsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + writer.uint32(26).fork(); + + for (const v of message.packetCommitmentSequences) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedPacketsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnreceivedPacketsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.packetCommitmentSequences.push((reader.uint64() as Long)); + } + } else { + message.packetCommitmentSequences.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnreceivedPacketsRequest { + const message = createBaseQueryUnreceivedPacketsRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? 
""; + message.packetCommitmentSequences = object.packetCommitmentSequences?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseQueryUnreceivedPacketsResponse(): QueryUnreceivedPacketsResponse { + return { + sequences: [], + height: undefined + }; +} + +export const QueryUnreceivedPacketsResponse = { + encode(message: QueryUnreceivedPacketsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.sequences) { + writer.uint64(v); + } + + writer.ldelim(); + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedPacketsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnreceivedPacketsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.sequences.push((reader.uint64() as Long)); + } + } else { + message.sequences.push((reader.uint64() as Long)); + } + + break; + + case 2: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnreceivedPacketsResponse { + const message = createBaseQueryUnreceivedPacketsResponse(); + message.sequences = object.sequences?.map(e => Long.fromValue(e)) || []; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryUnreceivedAcksRequest(): QueryUnreceivedAcksRequest { + return { + portId: "", + channelId: "", + packetAckSequences: [] + }; +} + +export const QueryUnreceivedAcksRequest = { + encode(message: QueryUnreceivedAcksRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + writer.uint32(26).fork(); + + for (const v of message.packetAckSequences) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedAcksRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnreceivedAcksRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.packetAckSequences.push((reader.uint64() as Long)); + } + } else { + message.packetAckSequences.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnreceivedAcksRequest { + const message = createBaseQueryUnreceivedAcksRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? 
""; + message.packetAckSequences = object.packetAckSequences?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; + +function createBaseQueryUnreceivedAcksResponse(): QueryUnreceivedAcksResponse { + return { + sequences: [], + height: undefined + }; +} + +export const QueryUnreceivedAcksResponse = { + encode(message: QueryUnreceivedAcksResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + + for (const v of message.sequences) { + writer.uint64(v); + } + + writer.ldelim(); + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUnreceivedAcksResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUnreceivedAcksResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.sequences.push((reader.uint64() as Long)); + } + } else { + message.sequences.push((reader.uint64() as Long)); + } + + break; + + case 2: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUnreceivedAcksResponse { + const message = createBaseQueryUnreceivedAcksResponse(); + message.sequences = object.sequences?.map(e => Long.fromValue(e)) || []; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryNextSequenceReceiveRequest(): QueryNextSequenceReceiveRequest { + return { + portId: "", + channelId: "" + }; +} + +export const QueryNextSequenceReceiveRequest = { + encode(message: QueryNextSequenceReceiveRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextSequenceReceiveRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNextSequenceReceiveRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNextSequenceReceiveRequest { + const message = createBaseQueryNextSequenceReceiveRequest(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? 
""; + return message; + } + +}; + +function createBaseQueryNextSequenceReceiveResponse(): QueryNextSequenceReceiveResponse { + return { + nextSequenceReceive: Long.UZERO, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryNextSequenceReceiveResponse = { + encode(message: QueryNextSequenceReceiveResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.nextSequenceReceive.isZero()) { + writer.uint32(8).uint64(message.nextSequenceReceive); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryNextSequenceReceiveResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryNextSequenceReceiveResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.nextSequenceReceive = (reader.uint64() as Long); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryNextSequenceReceiveResponse { + const message = createBaseQueryNextSequenceReceiveResponse(); + message.nextSequenceReceive = object.nextSequenceReceive !== undefined && object.nextSequenceReceive !== null ? Long.fromValue(object.nextSequenceReceive) : Long.UZERO; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/tx.rpc.msg.ts b/packages/codegen/src/ibc/core/channel/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..b9ebcc50 --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/tx.rpc.msg.ts @@ -0,0 +1,117 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgChannelOpenInit, MsgChannelOpenInitResponse, MsgChannelOpenTry, MsgChannelOpenTryResponse, MsgChannelOpenAck, MsgChannelOpenAckResponse, MsgChannelOpenConfirm, MsgChannelOpenConfirmResponse, MsgChannelCloseInit, MsgChannelCloseInitResponse, MsgChannelCloseConfirm, MsgChannelCloseConfirmResponse, MsgRecvPacket, MsgRecvPacketResponse, MsgTimeout, MsgTimeoutResponse, MsgTimeoutOnClose, MsgTimeoutOnCloseResponse, MsgAcknowledgement, MsgAcknowledgementResponse } from "./tx"; +/** Msg defines the ibc/channel Msg service. */ + +export interface Msg { + /** ChannelOpenInit defines a rpc handler method for MsgChannelOpenInit. */ + channelOpenInit(request: MsgChannelOpenInit): Promise; + /** ChannelOpenTry defines a rpc handler method for MsgChannelOpenTry. */ + + channelOpenTry(request: MsgChannelOpenTry): Promise; + /** ChannelOpenAck defines a rpc handler method for MsgChannelOpenAck. */ + + channelOpenAck(request: MsgChannelOpenAck): Promise; + /** ChannelOpenConfirm defines a rpc handler method for MsgChannelOpenConfirm. */ + + channelOpenConfirm(request: MsgChannelOpenConfirm): Promise; + /** ChannelCloseInit defines a rpc handler method for MsgChannelCloseInit. 
*/ + + channelCloseInit(request: MsgChannelCloseInit): Promise<MsgChannelCloseInitResponse>; + /** + * ChannelCloseConfirm defines a rpc handler method for + * MsgChannelCloseConfirm. + */ + + channelCloseConfirm(request: MsgChannelCloseConfirm): Promise<MsgChannelCloseConfirmResponse>; + /** RecvPacket defines a rpc handler method for MsgRecvPacket. */ + + recvPacket(request: MsgRecvPacket): Promise<MsgRecvPacketResponse>; + /** Timeout defines a rpc handler method for MsgTimeout. */ + + timeout(request: MsgTimeout): Promise<MsgTimeoutResponse>; + /** TimeoutOnClose defines a rpc handler method for MsgTimeoutOnClose. */ + + timeoutOnClose(request: MsgTimeoutOnClose): Promise<MsgTimeoutOnCloseResponse>; + /** Acknowledgement defines a rpc handler method for MsgAcknowledgement. */ + + acknowledgement(request: MsgAcknowledgement): Promise<MsgAcknowledgementResponse>; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.channelOpenInit = this.channelOpenInit.bind(this); + this.channelOpenTry = this.channelOpenTry.bind(this); + this.channelOpenAck = this.channelOpenAck.bind(this); + this.channelOpenConfirm = this.channelOpenConfirm.bind(this); + this.channelCloseInit = this.channelCloseInit.bind(this); + this.channelCloseConfirm = this.channelCloseConfirm.bind(this); + this.recvPacket = this.recvPacket.bind(this); + this.timeout = this.timeout.bind(this); + this.timeoutOnClose = this.timeoutOnClose.bind(this); + this.acknowledgement = this.acknowledgement.bind(this); + } + + channelOpenInit(request: MsgChannelOpenInit): Promise<MsgChannelOpenInitResponse> { + const data = MsgChannelOpenInit.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelOpenInit", data); + return promise.then(data => MsgChannelOpenInitResponse.decode(new _m0.Reader(data))); + } + + channelOpenTry(request: MsgChannelOpenTry): Promise<MsgChannelOpenTryResponse> { + const data = MsgChannelOpenTry.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelOpenTry", data); + return promise.then(data => MsgChannelOpenTryResponse.decode(new _m0.Reader(data))); + } + + channelOpenAck(request: MsgChannelOpenAck): Promise<MsgChannelOpenAckResponse> { + const data = MsgChannelOpenAck.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelOpenAck", data); + return promise.then(data => MsgChannelOpenAckResponse.decode(new _m0.Reader(data))); + } + + channelOpenConfirm(request: MsgChannelOpenConfirm): Promise<MsgChannelOpenConfirmResponse> { + const data = MsgChannelOpenConfirm.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelOpenConfirm", data); + return promise.then(data => MsgChannelOpenConfirmResponse.decode(new _m0.Reader(data))); + } + + channelCloseInit(request: MsgChannelCloseInit): Promise<MsgChannelCloseInitResponse> { + const data = MsgChannelCloseInit.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelCloseInit", data); + return promise.then(data => MsgChannelCloseInitResponse.decode(new _m0.Reader(data))); + } + + channelCloseConfirm(request: MsgChannelCloseConfirm): Promise<MsgChannelCloseConfirmResponse> { + const data = MsgChannelCloseConfirm.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "ChannelCloseConfirm", data); + return promise.then(data => MsgChannelCloseConfirmResponse.decode(new _m0.Reader(data))); + } + + recvPacket(request: MsgRecvPacket): Promise<MsgRecvPacketResponse> { + const data = MsgRecvPacket.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "RecvPacket", data); + return promise.then(data => MsgRecvPacketResponse.decode(new _m0.Reader(data))); + } + + timeout(request: MsgTimeout): 
Promise<MsgTimeoutResponse> { + const data = MsgTimeout.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "Timeout", data); + return promise.then(data => MsgTimeoutResponse.decode(new _m0.Reader(data))); + } + + timeoutOnClose(request: MsgTimeoutOnClose): Promise<MsgTimeoutOnCloseResponse> { + const data = MsgTimeoutOnClose.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "TimeoutOnClose", data); + return promise.then(data => MsgTimeoutOnCloseResponse.decode(new _m0.Reader(data))); + } + + acknowledgement(request: MsgAcknowledgement): Promise<MsgAcknowledgementResponse> { + const data = MsgAcknowledgement.encode(request).finish(); + const promise = this.rpc.request("ibc.core.channel.v1.Msg", "Acknowledgement", data); + return promise.then(data => MsgAcknowledgementResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/channel/v1/tx.ts b/packages/codegen/src/ibc/core/channel/v1/tx.ts new file mode 100644 index 00000000..1c22c471 --- /dev/null +++ b/packages/codegen/src/ibc/core/channel/v1/tx.ts @@ -0,0 +1,1487 @@ +import { Channel, ChannelSDKType, Packet, PacketSDKType } from "./channel"; +import { Height, HeightSDKType } from "../../client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * MsgChannelOpenInit defines an sdk.Msg to initialize a channel handshake. It + * is called by a relayer on Chain A. + */ + +export interface MsgChannelOpenInit { + portId: string; + channel?: Channel; + signer: string; +} +/** + * MsgChannelOpenInit defines an sdk.Msg to initialize a channel handshake. It + * is called by a relayer on Chain A. + */ + +export interface MsgChannelOpenInitSDKType { + port_id: string; + channel?: ChannelSDKType; + signer: string; +} +/** MsgChannelOpenInitResponse defines the Msg/ChannelOpenInit response type. */ + +export interface MsgChannelOpenInitResponse {} +/** MsgChannelOpenInitResponse defines the Msg/ChannelOpenInit response type. */ + +export interface MsgChannelOpenInitResponseSDKType {} +/** + * MsgChannelOpenInit defines a msg sent by a Relayer to try to open a channel + * on Chain B. + */ + +export interface MsgChannelOpenTry { + portId: string; + /** + * in the case of crossing hello's, when both chains call OpenInit, we need + * the channel identifier of the previous channel in state INIT + */ + + previousChannelId: string; + channel?: Channel; + counterpartyVersion: string; + proofInit: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenInit defines a msg sent by a Relayer to try to open a channel + * on Chain B. + */ + +export interface MsgChannelOpenTrySDKType { + port_id: string; + previous_channel_id: string; + channel?: ChannelSDKType; + counterparty_version: string; + proof_init: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgChannelOpenTryResponse defines the Msg/ChannelOpenTry response type. */ + +export interface MsgChannelOpenTryResponse {} +/** MsgChannelOpenTryResponse defines the Msg/ChannelOpenTry response type. */ + +export interface MsgChannelOpenTryResponseSDKType {} +/** + * MsgChannelOpenAck defines a msg sent by a Relayer to Chain A to acknowledge + * the change of channel state to TRYOPEN on Chain B.
+ */ + +export interface MsgChannelOpenAck { + portId: string; + channelId: string; + counterpartyChannelId: string; + counterpartyVersion: string; + proofTry: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenAck defines a msg sent by a Relayer to Chain A to acknowledge + * the change of channel state to TRYOPEN on Chain B. + */ + +export interface MsgChannelOpenAckSDKType { + port_id: string; + channel_id: string; + counterparty_channel_id: string; + counterparty_version: string; + proof_try: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgChannelOpenAckResponse defines the Msg/ChannelOpenAck response type. */ + +export interface MsgChannelOpenAckResponse {} +/** MsgChannelOpenAckResponse defines the Msg/ChannelOpenAck response type. */ + +export interface MsgChannelOpenAckResponseSDKType {} +/** + * MsgChannelOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of channel state to OPEN on Chain A. + */ + +export interface MsgChannelOpenConfirm { + portId: string; + channelId: string; + proofAck: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of channel state to OPEN on Chain A. + */ + +export interface MsgChannelOpenConfirmSDKType { + port_id: string; + channel_id: string; + proof_ack: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgChannelOpenConfirmResponse defines the Msg/ChannelOpenConfirm response + * type. + */ + +export interface MsgChannelOpenConfirmResponse {} +/** + * MsgChannelOpenConfirmResponse defines the Msg/ChannelOpenConfirm response + * type. + */ + +export interface MsgChannelOpenConfirmResponseSDKType {} +/** + * MsgChannelCloseInit defines a msg sent by a Relayer to Chain A + * to close a channel with Chain B. + */ + +export interface MsgChannelCloseInit { + portId: string; + channelId: string; + signer: string; +} +/** + * MsgChannelCloseInit defines a msg sent by a Relayer to Chain A + * to close a channel with Chain B. + */ + +export interface MsgChannelCloseInitSDKType { + port_id: string; + channel_id: string; + signer: string; +} +/** MsgChannelCloseInitResponse defines the Msg/ChannelCloseInit response type. */ + +export interface MsgChannelCloseInitResponse {} +/** MsgChannelCloseInitResponse defines the Msg/ChannelCloseInit response type. */ + +export interface MsgChannelCloseInitResponseSDKType {} +/** + * MsgChannelCloseConfirm defines a msg sent by a Relayer to Chain B + * to acknowledge the change of channel state to CLOSED on Chain A. + */ + +export interface MsgChannelCloseConfirm { + portId: string; + channelId: string; + proofInit: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgChannelCloseConfirm defines a msg sent by a Relayer to Chain B + * to acknowledge the change of channel state to CLOSED on Chain A. + */ + +export interface MsgChannelCloseConfirmSDKType { + port_id: string; + channel_id: string; + proof_init: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgChannelCloseConfirmResponse defines the Msg/ChannelCloseConfirm response + * type. + */ + +export interface MsgChannelCloseConfirmResponse {} +/** + * MsgChannelCloseConfirmResponse defines the Msg/ChannelCloseConfirm response + * type. 
+ */ + +export interface MsgChannelCloseConfirmResponseSDKType {} +/** MsgRecvPacket receives incoming IBC packet */ + +export interface MsgRecvPacket { + packet?: Packet; + proofCommitment: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** MsgRecvPacket receives incoming IBC packet */ + +export interface MsgRecvPacketSDKType { + packet?: PacketSDKType; + proof_commitment: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgRecvPacketResponse defines the Msg/RecvPacket response type. */ + +export interface MsgRecvPacketResponse {} +/** MsgRecvPacketResponse defines the Msg/RecvPacket response type. */ + +export interface MsgRecvPacketResponseSDKType {} +/** MsgTimeout receives timed-out packet */ + +export interface MsgTimeout { + packet?: Packet; + proofUnreceived: Uint8Array; + proofHeight?: Height; + nextSequenceRecv: Long; + signer: string; +} +/** MsgTimeout receives timed-out packet */ + +export interface MsgTimeoutSDKType { + packet?: PacketSDKType; + proof_unreceived: Uint8Array; + proof_height?: HeightSDKType; + next_sequence_recv: Long; + signer: string; +} +/** MsgTimeoutResponse defines the Msg/Timeout response type. */ + +export interface MsgTimeoutResponse {} +/** MsgTimeoutResponse defines the Msg/Timeout response type. */ + +export interface MsgTimeoutResponseSDKType {} +/** MsgTimeoutOnClose timed-out packet upon counterparty channel closure. */ + +export interface MsgTimeoutOnClose { + packet?: Packet; + proofUnreceived: Uint8Array; + proofClose: Uint8Array; + proofHeight?: Height; + nextSequenceRecv: Long; + signer: string; +} +/** MsgTimeoutOnClose timed-out packet upon counterparty channel closure. */ + +export interface MsgTimeoutOnCloseSDKType { + packet?: PacketSDKType; + proof_unreceived: Uint8Array; + proof_close: Uint8Array; + proof_height?: HeightSDKType; + next_sequence_recv: Long; + signer: string; +} +/** MsgTimeoutOnCloseResponse defines the Msg/TimeoutOnClose response type. */ + +export interface MsgTimeoutOnCloseResponse {} +/** MsgTimeoutOnCloseResponse defines the Msg/TimeoutOnClose response type. */ + +export interface MsgTimeoutOnCloseResponseSDKType {} +/** MsgAcknowledgement receives incoming IBC acknowledgement */ + +export interface MsgAcknowledgement { + packet?: Packet; + acknowledgement: Uint8Array; + proofAcked: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** MsgAcknowledgement receives incoming IBC acknowledgement */ + +export interface MsgAcknowledgementSDKType { + packet?: PacketSDKType; + acknowledgement: Uint8Array; + proof_acked: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** MsgAcknowledgementResponse defines the Msg/Acknowledgement response type. */ + +export interface MsgAcknowledgementResponse {} +/** MsgAcknowledgementResponse defines the Msg/Acknowledgement response type. 
*/ + +export interface MsgAcknowledgementResponseSDKType {} + +function createBaseMsgChannelOpenInit(): MsgChannelOpenInit { + return { + portId: "", + channel: undefined, + signer: "" + }; +} + +export const MsgChannelOpenInit = { + encode(message: MsgChannelOpenInit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channel !== undefined) { + Channel.encode(message.channel, writer.uint32(18).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(26).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenInit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenInit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channel = Channel.decode(reader, reader.uint32()); + break; + + case 3: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelOpenInit { + const message = createBaseMsgChannelOpenInit(); + message.portId = object.portId ?? ""; + message.channel = object.channel !== undefined && object.channel !== null ? Channel.fromPartial(object.channel) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelOpenInitResponse(): MsgChannelOpenInitResponse { + return {}; +} + +export const MsgChannelOpenInitResponse = { + encode(_: MsgChannelOpenInitResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenInitResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenInitResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelOpenInitResponse { + const message = createBaseMsgChannelOpenInitResponse(); + return message; + } + +}; + +function createBaseMsgChannelOpenTry(): MsgChannelOpenTry { + return { + portId: "", + previousChannelId: "", + channel: undefined, + counterpartyVersion: "", + proofInit: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgChannelOpenTry = { + encode(message: MsgChannelOpenTry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.previousChannelId !== "") { + writer.uint32(18).string(message.previousChannelId); + } + + if (message.channel !== undefined) { + Channel.encode(message.channel, writer.uint32(26).fork()).ldelim(); + } + + if (message.counterpartyVersion !== "") { + writer.uint32(34).string(message.counterpartyVersion); + } + + if (message.proofInit.length !== 0) { + writer.uint32(42).bytes(message.proofInit); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(50).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(58).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenTry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenTry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.previousChannelId = reader.string(); + break; + + case 3: + message.channel = Channel.decode(reader, reader.uint32()); + break; + + case 4: + message.counterpartyVersion = reader.string(); + break; + + case 5: + message.proofInit = reader.bytes(); + break; + + case 6: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 7: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelOpenTry { + const message = createBaseMsgChannelOpenTry(); + message.portId = object.portId ?? ""; + message.previousChannelId = object.previousChannelId ?? ""; + message.channel = object.channel !== undefined && object.channel !== null ? Channel.fromPartial(object.channel) : undefined; + message.counterpartyVersion = object.counterpartyVersion ?? ""; + message.proofInit = object.proofInit ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelOpenTryResponse(): MsgChannelOpenTryResponse { + return {}; +} + +export const MsgChannelOpenTryResponse = { + encode(_: MsgChannelOpenTryResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenTryResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenTryResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelOpenTryResponse { + const message = createBaseMsgChannelOpenTryResponse(); + return message; + } + +}; + +function createBaseMsgChannelOpenAck(): MsgChannelOpenAck { + return { + portId: "", + channelId: "", + counterpartyChannelId: "", + counterpartyVersion: "", + proofTry: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgChannelOpenAck = { + encode(message: MsgChannelOpenAck, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.counterpartyChannelId !== "") { + writer.uint32(26).string(message.counterpartyChannelId); + } + + if (message.counterpartyVersion !== "") { + writer.uint32(34).string(message.counterpartyVersion); + } + + if (message.proofTry.length !== 0) { + writer.uint32(42).bytes(message.proofTry); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(50).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(58).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenAck { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenAck(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.counterpartyChannelId = reader.string(); + break; + + case 4: + message.counterpartyVersion = reader.string(); + break; + + case 5: + message.proofTry = reader.bytes(); + break; + + case 6: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 7: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelOpenAck { + const message = createBaseMsgChannelOpenAck(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.counterpartyChannelId = object.counterpartyChannelId ?? ""; + message.counterpartyVersion = object.counterpartyVersion ?? ""; + message.proofTry = object.proofTry ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelOpenAckResponse(): MsgChannelOpenAckResponse { + return {}; +} + +export const MsgChannelOpenAckResponse = { + encode(_: MsgChannelOpenAckResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenAckResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenAckResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelOpenAckResponse { + const message = createBaseMsgChannelOpenAckResponse(); + return message; + } + +}; + +function createBaseMsgChannelOpenConfirm(): MsgChannelOpenConfirm { + return { + portId: "", + channelId: "", + proofAck: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgChannelOpenConfirm = { + encode(message: MsgChannelOpenConfirm, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.proofAck.length !== 0) { + writer.uint32(26).bytes(message.proofAck); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(42).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenConfirm { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenConfirm(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.proofAck = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 5: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelOpenConfirm { + const message = createBaseMsgChannelOpenConfirm(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.proofAck = object.proofAck ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelOpenConfirmResponse(): MsgChannelOpenConfirmResponse { + return {}; +} + +export const MsgChannelOpenConfirmResponse = { + encode(_: MsgChannelOpenConfirmResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelOpenConfirmResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgChannelOpenConfirmResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelOpenConfirmResponse { + const message = createBaseMsgChannelOpenConfirmResponse(); + return message; + } + +}; + +function createBaseMsgChannelCloseInit(): MsgChannelCloseInit { + return { + portId: "", + channelId: "", + signer: "" + }; +} + +export const MsgChannelCloseInit = { + encode(message: MsgChannelCloseInit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.signer !== "") { + writer.uint32(26).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseInit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelCloseInit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelCloseInit { + const message = createBaseMsgChannelCloseInit(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelCloseInitResponse(): MsgChannelCloseInitResponse { + return {}; +} + +export const MsgChannelCloseInitResponse = { + encode(_: MsgChannelCloseInitResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseInitResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgChannelCloseInitResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelCloseInitResponse { + const message = createBaseMsgChannelCloseInitResponse(); + return message; + } + +}; + +function createBaseMsgChannelCloseConfirm(): MsgChannelCloseConfirm { + return { + portId: "", + channelId: "", + proofInit: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgChannelCloseConfirm = { + encode(message: MsgChannelCloseConfirm, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.channelId !== "") { + writer.uint32(18).string(message.channelId); + } + + if (message.proofInit.length !== 0) { + writer.uint32(26).bytes(message.proofInit); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(42).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseConfirm { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgChannelCloseConfirm(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.channelId = reader.string(); + break; + + case 3: + message.proofInit = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 5: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgChannelCloseConfirm { + const message = createBaseMsgChannelCloseConfirm(); + message.portId = object.portId ?? ""; + message.channelId = object.channelId ?? ""; + message.proofInit = object.proofInit ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgChannelCloseConfirmResponse(): MsgChannelCloseConfirmResponse { + return {}; +} + +export const MsgChannelCloseConfirmResponse = { + encode(_: MsgChannelCloseConfirmResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgChannelCloseConfirmResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgChannelCloseConfirmResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgChannelCloseConfirmResponse { + const message = createBaseMsgChannelCloseConfirmResponse(); + return message; + } + +}; + +function createBaseMsgRecvPacket(): MsgRecvPacket { + return { + packet: undefined, + proofCommitment: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgRecvPacket = { + encode(message: MsgRecvPacket, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.packet !== undefined) { + Packet.encode(message.packet, writer.uint32(10).fork()).ldelim(); + } + + if (message.proofCommitment.length !== 0) { + writer.uint32(18).bytes(message.proofCommitment); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(34).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRecvPacket { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRecvPacket(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.packet = Packet.decode(reader, reader.uint32()); + break; + + case 2: + message.proofCommitment = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 4: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgRecvPacket { + const message = createBaseMsgRecvPacket(); + message.packet = object.packet !== undefined && object.packet !== null ? Packet.fromPartial(object.packet) : undefined; + message.proofCommitment = object.proofCommitment ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgRecvPacketResponse(): MsgRecvPacketResponse { + return {}; +} + +export const MsgRecvPacketResponse = { + encode(_: MsgRecvPacketResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRecvPacketResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRecvPacketResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgRecvPacketResponse { + const message = createBaseMsgRecvPacketResponse(); + return message; + } + +}; + +function createBaseMsgTimeout(): MsgTimeout { + return { + packet: undefined, + proofUnreceived: new Uint8Array(), + proofHeight: undefined, + nextSequenceRecv: Long.UZERO, + signer: "" + }; +} + +export const MsgTimeout = { + encode(message: MsgTimeout, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.packet !== undefined) { + Packet.encode(message.packet, writer.uint32(10).fork()).ldelim(); + } + + if (message.proofUnreceived.length !== 0) { + writer.uint32(18).bytes(message.proofUnreceived); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + if (!message.nextSequenceRecv.isZero()) { + writer.uint32(32).uint64(message.nextSequenceRecv); + } + + if (message.signer !== "") { + writer.uint32(42).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeout { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgTimeout(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.packet = Packet.decode(reader, reader.uint32()); + break; + + case 2: + message.proofUnreceived = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 4: + message.nextSequenceRecv = (reader.uint64() as Long); + break; + + case 5: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgTimeout { + const message = createBaseMsgTimeout(); + message.packet = object.packet !== undefined && object.packet !== null ? Packet.fromPartial(object.packet) : undefined; + message.proofUnreceived = object.proofUnreceived ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.nextSequenceRecv = object.nextSequenceRecv !== undefined && object.nextSequenceRecv !== null ? Long.fromValue(object.nextSequenceRecv) : Long.UZERO; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgTimeoutResponse(): MsgTimeoutResponse { + return {}; +} + +export const MsgTimeoutResponse = { + encode(_: MsgTimeoutResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgTimeoutResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgTimeoutResponse { + const message = createBaseMsgTimeoutResponse(); + return message; + } + +}; + +function createBaseMsgTimeoutOnClose(): MsgTimeoutOnClose { + return { + packet: undefined, + proofUnreceived: new Uint8Array(), + proofClose: new Uint8Array(), + proofHeight: undefined, + nextSequenceRecv: Long.UZERO, + signer: "" + }; +} + +export const MsgTimeoutOnClose = { + encode(message: MsgTimeoutOnClose, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.packet !== undefined) { + Packet.encode(message.packet, writer.uint32(10).fork()).ldelim(); + } + + if (message.proofUnreceived.length !== 0) { + writer.uint32(18).bytes(message.proofUnreceived); + } + + if (message.proofClose.length !== 0) { + writer.uint32(26).bytes(message.proofClose); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + if (!message.nextSequenceRecv.isZero()) { + writer.uint32(40).uint64(message.nextSequenceRecv); + } + + if (message.signer !== "") { + writer.uint32(50).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutOnClose { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgTimeoutOnClose(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.packet = Packet.decode(reader, reader.uint32()); + break; + + case 2: + message.proofUnreceived = reader.bytes(); + break; + + case 3: + message.proofClose = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 5: + message.nextSequenceRecv = (reader.uint64() as Long); + break; + + case 6: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgTimeoutOnClose { + const message = createBaseMsgTimeoutOnClose(); + message.packet = object.packet !== undefined && object.packet !== null ? Packet.fromPartial(object.packet) : undefined; + message.proofUnreceived = object.proofUnreceived ?? new Uint8Array(); + message.proofClose = object.proofClose ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.nextSequenceRecv = object.nextSequenceRecv !== undefined && object.nextSequenceRecv !== null ? Long.fromValue(object.nextSequenceRecv) : Long.UZERO; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgTimeoutOnCloseResponse(): MsgTimeoutOnCloseResponse { + return {}; +} + +export const MsgTimeoutOnCloseResponse = { + encode(_: MsgTimeoutOnCloseResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgTimeoutOnCloseResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgTimeoutOnCloseResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgTimeoutOnCloseResponse { + const message = createBaseMsgTimeoutOnCloseResponse(); + return message; + } + +}; + +function createBaseMsgAcknowledgement(): MsgAcknowledgement { + return { + packet: undefined, + acknowledgement: new Uint8Array(), + proofAcked: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgAcknowledgement = { + encode(message: MsgAcknowledgement, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.packet !== undefined) { + Packet.encode(message.packet, writer.uint32(10).fork()).ldelim(); + } + + if (message.acknowledgement.length !== 0) { + writer.uint32(18).bytes(message.acknowledgement); + } + + if (message.proofAcked.length !== 0) { + writer.uint32(26).bytes(message.proofAcked); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(42).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAcknowledgement { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAcknowledgement(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.packet = Packet.decode(reader, reader.uint32()); + break; + + case 2: + message.acknowledgement = reader.bytes(); + break; + + case 3: + message.proofAcked = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 5: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgAcknowledgement { + const message = createBaseMsgAcknowledgement(); + message.packet = object.packet !== undefined && object.packet !== null ? Packet.fromPartial(object.packet) : undefined; + message.acknowledgement = object.acknowledgement ?? new Uint8Array(); + message.proofAcked = object.proofAcked ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgAcknowledgementResponse(): MsgAcknowledgementResponse { + return {}; +} + +export const MsgAcknowledgementResponse = { + encode(_: MsgAcknowledgementResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAcknowledgementResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAcknowledgementResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgAcknowledgementResponse { + const message = createBaseMsgAcknowledgementResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/client/v1/client.ts b/packages/codegen/src/ibc/core/client/v1/client.ts new file mode 100644 index 00000000..309ef44f --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/client.ts @@ -0,0 +1,597 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Plan, PlanSDKType } from "../../../../cosmos/upgrade/v1beta1/upgrade"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ + +export interface IdentifiedClientState { + /** client identifier */ + clientId: string; + /** client state */ + + clientState?: Any; +} +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ + +export interface IdentifiedClientStateSDKType { + client_id: string; + client_state?: AnySDKType; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ + +export interface ConsensusStateWithHeight { + /** consensus state height */ + height?: Height; + /** consensus state */ + + consensusState?: Any; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ + +export interface ConsensusStateWithHeightSDKType { + height?: HeightSDKType; + consensus_state?: AnySDKType; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. + */ + +export interface ClientConsensusStates { + /** client identifier */ + clientId: string; + /** consensus states and their heights associated with the client */ + + consensusStates: ConsensusStateWithHeight[]; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. + */ + +export interface ClientConsensusStatesSDKType { + client_id: string; + consensus_states: ConsensusStateWithHeightSDKType[]; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ + +export interface ClientUpdateProposal { + /** the title of the update proposal */ + title: string; + /** the description of the proposal */ + + description: string; + /** the client identifier for the client to be updated if the proposal passes */ + + subjectClientId: string; + /** + * the substitute client identifier for the client standing in for the subject + * client + */ + + substituteClientId: string; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). 
+ */ + +export interface ClientUpdateProposalSDKType { + title: string; + description: string; + subject_client_id: string; + substitute_client_id: string; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ + +export interface UpgradeProposal { + title: string; + description: string; + plan?: Plan; + /** + * An UpgradedClientState must be provided to perform an IBC breaking upgrade. + * This will make the chain commit to the correct upgraded (self) client state + * before the upgrade occurs, so that connecting chains can verify that the + * new upgraded client is valid by verifying a proof on the previous version + * of the chain. This will allow IBC connections to persist smoothly across + * planned chain upgrades + */ + + upgradedClientState?: Any; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ + +export interface UpgradeProposalSDKType { + title: string; + description: string; + plan?: PlanSDKType; + upgraded_client_state?: AnySDKType; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ + +export interface Height { + /** the revision that the client is currently on */ + revisionNumber: Long; + /** the height within the given revision */ + + revisionHeight: Long; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ + +export interface HeightSDKType { + revision_number: Long; + revision_height: Long; +} +/** Params defines the set of IBC light client parameters. */ + +export interface Params { + /** allowed_clients defines the list of allowed client state types. */ + allowedClients: string[]; +} +/** Params defines the set of IBC light client parameters. */ + +export interface ParamsSDKType { + allowed_clients: string[]; +} + +function createBaseIdentifiedClientState(): IdentifiedClientState { + return { + clientId: "", + clientState: undefined + }; +} + +export const IdentifiedClientState = { + encode(message: IdentifiedClientState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedClientState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseIdentifiedClientState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): IdentifiedClientState { + const message = createBaseIdentifiedClientState(); + message.clientId = object.clientId ?? ""; + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + return message; + } + +}; + +function createBaseConsensusStateWithHeight(): ConsensusStateWithHeight { + return { + height: undefined, + consensusState: undefined + }; +} + +export const ConsensusStateWithHeight = { + encode(message: ConsensusStateWithHeight, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(10).fork()).ldelim(); + } + + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateWithHeight { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusStateWithHeight(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = Height.decode(reader, reader.uint32()); + break; + + case 2: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusStateWithHeight { + const message = createBaseConsensusStateWithHeight(); + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + return message; + } + +}; + +function createBaseClientConsensusStates(): ClientConsensusStates { + return { + clientId: "", + consensusStates: [] + }; +} + +export const ClientConsensusStates = { + encode(message: ClientConsensusStates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + for (const v of message.consensusStates) { + ConsensusStateWithHeight.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientConsensusStates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientConsensusStates(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.consensusStates.push(ConsensusStateWithHeight.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientConsensusStates { + const message = createBaseClientConsensusStates(); + message.clientId = object.clientId ?? ""; + message.consensusStates = object.consensusStates?.map(e => ConsensusStateWithHeight.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseClientUpdateProposal(): ClientUpdateProposal { + return { + title: "", + description: "", + subjectClientId: "", + substituteClientId: "" + }; +} + +export const ClientUpdateProposal = { + encode(message: ClientUpdateProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.subjectClientId !== "") { + writer.uint32(26).string(message.subjectClientId); + } + + if (message.substituteClientId !== "") { + writer.uint32(34).string(message.substituteClientId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientUpdateProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientUpdateProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.subjectClientId = reader.string(); + break; + + case 4: + message.substituteClientId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientUpdateProposal { + const message = createBaseClientUpdateProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.subjectClientId = object.subjectClientId ?? ""; + message.substituteClientId = object.substituteClientId ?? ""; + return message; + } + +}; + +function createBaseUpgradeProposal(): UpgradeProposal { + return { + title: "", + description: "", + plan: undefined, + upgradedClientState: undefined + }; +} + +export const UpgradeProposal = { + encode(message: UpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + + if (message.upgradedClientState !== undefined) { + Any.encode(message.upgradedClientState, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUpgradeProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + + case 2: + message.description = reader.string(); + break; + + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + + case 4: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): UpgradeProposal { + const message = createBaseUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.plan = object.plan !== undefined && object.plan !== null ? Plan.fromPartial(object.plan) : undefined; + message.upgradedClientState = object.upgradedClientState !== undefined && object.upgradedClientState !== null ? Any.fromPartial(object.upgradedClientState) : undefined; + return message; + } + +}; + +function createBaseHeight(): Height { + return { + revisionNumber: Long.UZERO, + revisionHeight: Long.UZERO + }; +} + +export const Height = { + encode(message: Height, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.revisionNumber.isZero()) { + writer.uint32(8).uint64(message.revisionNumber); + } + + if (!message.revisionHeight.isZero()) { + writer.uint32(16).uint64(message.revisionHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Height { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeight(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.revisionNumber = (reader.uint64() as Long); + break; + + case 2: + message.revisionHeight = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Height { + const message = createBaseHeight(); + message.revisionNumber = object.revisionNumber !== undefined && object.revisionNumber !== null ? Long.fromValue(object.revisionNumber) : Long.UZERO; + message.revisionHeight = object.revisionHeight !== undefined && object.revisionHeight !== null ? Long.fromValue(object.revisionHeight) : Long.UZERO; + return message; + } + +}; + +function createBaseParams(): Params { + return { + allowedClients: [] + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.allowedClients) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.allowedClients.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.allowedClients = object.allowedClients?.map(e => e) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/client/v1/genesis.ts b/packages/codegen/src/ibc/core/client/v1/genesis.ts new file mode 100644 index 00000000..1eba4075 --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/genesis.ts @@ -0,0 +1,276 @@ +import { IdentifiedClientState, IdentifiedClientStateSDKType, ClientConsensusStates, ClientConsensusStatesSDKType, Params, ParamsSDKType } from "./client"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc client submodule's genesis state. */ + +export interface GenesisState { + /** client states with their corresponding identifiers */ + clients: IdentifiedClientState[]; + /** consensus states from each client */ + + clientsConsensus: ClientConsensusStates[]; + /** metadata from each client */ + + clientsMetadata: IdentifiedGenesisMetadata[]; + params?: Params; + /** create localhost on initialization */ + + createLocalhost: boolean; + /** the sequence for the next generated client identifier */ + + nextClientSequence: Long; +} +/** GenesisState defines the ibc client submodule's genesis state. */ + +export interface GenesisStateSDKType { + clients: IdentifiedClientStateSDKType[]; + clients_consensus: ClientConsensusStatesSDKType[]; + clients_metadata: IdentifiedGenesisMetadataSDKType[]; + params?: ParamsSDKType; + create_localhost: boolean; + next_client_sequence: Long; +} +/** + * GenesisMetadata defines the genesis type for metadata that clients may return + * with ExportMetadata + */ + +export interface GenesisMetadata { + /** store key of metadata without clientID-prefix */ + key: Uint8Array; + /** metadata value */ + + value: Uint8Array; +} +/** + * GenesisMetadata defines the genesis type for metadata that clients may return + * with ExportMetadata + */ + +export interface GenesisMetadataSDKType { + key: Uint8Array; + value: Uint8Array; +} +/** + * IdentifiedGenesisMetadata has the client metadata with the corresponding + * client id. + */ + +export interface IdentifiedGenesisMetadata { + clientId: string; + clientMetadata: GenesisMetadata[]; +} +/** + * IdentifiedGenesisMetadata has the client metadata with the corresponding + * client id. 
+ */ + +export interface IdentifiedGenesisMetadataSDKType { + client_id: string; + client_metadata: GenesisMetadataSDKType[]; +} + +function createBaseGenesisState(): GenesisState { + return { + clients: [], + clientsConsensus: [], + clientsMetadata: [], + params: undefined, + createLocalhost: false, + nextClientSequence: Long.UZERO + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.clients) { + IdentifiedClientState.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.clientsConsensus) { + ClientConsensusStates.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.clientsMetadata) { + IdentifiedGenesisMetadata.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(34).fork()).ldelim(); + } + + if (message.createLocalhost === true) { + writer.uint32(40).bool(message.createLocalhost); + } + + if (!message.nextClientSequence.isZero()) { + writer.uint32(48).uint64(message.nextClientSequence); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clients.push(IdentifiedClientState.decode(reader, reader.uint32())); + break; + + case 2: + message.clientsConsensus.push(ClientConsensusStates.decode(reader, reader.uint32())); + break; + + case 3: + message.clientsMetadata.push(IdentifiedGenesisMetadata.decode(reader, reader.uint32())); + break; + + case 4: + message.params = Params.decode(reader, reader.uint32()); + break; + + case 5: + message.createLocalhost = reader.bool(); + break; + + case 6: + message.nextClientSequence = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.clients = object.clients?.map(e => IdentifiedClientState.fromPartial(e)) || []; + message.clientsConsensus = object.clientsConsensus?.map(e => ClientConsensusStates.fromPartial(e)) || []; + message.clientsMetadata = object.clientsMetadata?.map(e => IdentifiedGenesisMetadata.fromPartial(e)) || []; + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + message.createLocalhost = object.createLocalhost ?? false; + message.nextClientSequence = object.nextClientSequence !== undefined && object.nextClientSequence !== null ? Long.fromValue(object.nextClientSequence) : Long.UZERO; + return message; + } + +}; + +function createBaseGenesisMetadata(): GenesisMetadata { + return { + key: new Uint8Array(), + value: new Uint8Array() + }; +} + +export const GenesisMetadata = { + encode(message: GenesisMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisMetadata { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisMetadata { + const message = createBaseGenesisMetadata(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + return message; + } + +}; + +function createBaseIdentifiedGenesisMetadata(): IdentifiedGenesisMetadata { + return { + clientId: "", + clientMetadata: [] + }; +} + +export const IdentifiedGenesisMetadata = { + encode(message: IdentifiedGenesisMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + for (const v of message.clientMetadata) { + GenesisMetadata.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedGenesisMetadata { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIdentifiedGenesisMetadata(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.clientMetadata.push(GenesisMetadata.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): IdentifiedGenesisMetadata { + const message = createBaseIdentifiedGenesisMetadata(); + message.clientId = object.clientId ?? 
""; + message.clientMetadata = object.clientMetadata?.map(e => GenesisMetadata.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/client/v1/query.lcd.ts b/packages/codegen/src/ibc/core/client/v1/query.lcd.ts new file mode 100644 index 00000000..8a1e13e4 --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/query.lcd.ts @@ -0,0 +1,107 @@ +import { setPaginationParams } from "../../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryClientStateRequest, QueryClientStateResponseSDKType, QueryClientStatesRequest, QueryClientStatesResponseSDKType, QueryConsensusStateRequest, QueryConsensusStateResponseSDKType, QueryConsensusStatesRequest, QueryConsensusStatesResponseSDKType, QueryClientStatusRequest, QueryClientStatusResponseSDKType, QueryClientParamsRequest, QueryClientParamsResponseSDKType, QueryUpgradedClientStateRequest, QueryUpgradedClientStateResponseSDKType, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.clientState = this.clientState.bind(this); + this.clientStates = this.clientStates.bind(this); + this.consensusState = this.consensusState.bind(this); + this.consensusStates = this.consensusStates.bind(this); + this.clientStatus = this.clientStatus.bind(this); + this.clientParams = this.clientParams.bind(this); + this.upgradedClientState = this.upgradedClientState.bind(this); + this.upgradedConsensusState = this.upgradedConsensusState.bind(this); + } + /* ClientState queries an IBC light client. */ + + + async clientState(params: QueryClientStateRequest): Promise { + const endpoint = `ibc/core/client/v1/client_states/${params.clientId}`; + return await this.req.get(endpoint); + } + /* ClientStates queries all the IBC light clients of a chain. */ + + + async clientStates(params: QueryClientStatesRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/core/client/v1/client_states`; + return await this.req.get(endpoint, options); + } + /* ConsensusState queries a consensus state associated with a client state at + a given height. */ + + + async consensusState(params: QueryConsensusStateRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.latestHeight !== "undefined") { + options.params.latest_height = params.latestHeight; + } + + const endpoint = `ibc/core/client/v1/consensus_states/${params.clientId}/revision/${params.revisionNumber}/height/${params.revisionHeight}`; + return await this.req.get(endpoint, options); + } + /* ConsensusStates queries all the consensus state associated with a given + client. */ + + + async consensusStates(params: QueryConsensusStatesRequest): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/core/client/v1/consensus_states/${params.clientId}`; + return await this.req.get(endpoint, options); + } + /* Status queries the status of an IBC client. 
+
+
+  async clientStatus(params: QueryClientStatusRequest): Promise<QueryClientStatusResponseSDKType> {
+    const endpoint = `ibc/core/client/v1/client_status/${params.clientId}`;
+    return await this.req.get<QueryClientStatusResponseSDKType>(endpoint);
+  }
+  /* ClientParams queries all parameters of the ibc client. */
+
+
+  async clientParams(_params: QueryClientParamsRequest = {}): Promise<QueryClientParamsResponseSDKType> {
+    const endpoint = `ibc/client/v1/params`;
+    return await this.req.get<QueryClientParamsResponseSDKType>(endpoint);
+  }
+  /* UpgradedClientState queries an Upgraded IBC light client. */
+
+
+  async upgradedClientState(_params: QueryUpgradedClientStateRequest = {}): Promise<QueryUpgradedClientStateResponseSDKType> {
+    const endpoint = `ibc/core/client/v1/upgraded_client_states`;
+    return await this.req.get<QueryUpgradedClientStateResponseSDKType>(endpoint);
+  }
+  /* UpgradedConsensusState queries an Upgraded IBC consensus state. */
+
+
+  async upgradedConsensusState(_params: QueryUpgradedConsensusStateRequest = {}): Promise<QueryUpgradedConsensusStateResponseSDKType> {
+    const endpoint = `ibc/core/client/v1/upgraded_consensus_states`;
+    return await this.req.get<QueryUpgradedConsensusStateResponseSDKType>(endpoint);
+  }
+
+}
\ No newline at end of file
diff --git a/packages/codegen/src/ibc/core/client/v1/query.rpc.Query.ts b/packages/codegen/src/ibc/core/client/v1/query.rpc.Query.ts
new file mode 100644
index 00000000..58429d53
--- /dev/null
+++ b/packages/codegen/src/ibc/core/client/v1/query.rpc.Query.ts
@@ -0,0 +1,141 @@
+import { Rpc } from "../../../../helpers";
+import * as _m0 from "protobufjs/minimal";
+import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate";
+import { QueryClientStateRequest, QueryClientStateResponse, QueryClientStatesRequest, QueryClientStatesResponse, QueryConsensusStateRequest, QueryConsensusStateResponse, QueryConsensusStatesRequest, QueryConsensusStatesResponse, QueryClientStatusRequest, QueryClientStatusResponse, QueryClientParamsRequest, QueryClientParamsResponse, QueryUpgradedClientStateRequest, QueryUpgradedClientStateResponse, QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponse } from "./query";
+/** Query provides defines the gRPC querier service */
+
+export interface Query {
+  /** ClientState queries an IBC light client. */
+  clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse>;
+  /** ClientStates queries all the IBC light clients of a chain. */
+
+  clientStates(request?: QueryClientStatesRequest): Promise<QueryClientStatesResponse>;
+  /**
+   * ConsensusState queries a consensus state associated with a client state at
+   * a given height.
+   */
+
+  consensusState(request: QueryConsensusStateRequest): Promise<QueryConsensusStateResponse>;
+  /**
+   * ConsensusStates queries all the consensus state associated with a given
+   * client.
+   */
+
+  consensusStates(request: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponse>;
+  /** Status queries the status of an IBC client. */
+
+  clientStatus(request: QueryClientStatusRequest): Promise<QueryClientStatusResponse>;
+  /** ClientParams queries all parameters of the ibc client. */
+
+  clientParams(request?: QueryClientParamsRequest): Promise<QueryClientParamsResponse>;
+  /** UpgradedClientState queries an Upgraded IBC light client. */
+
+  upgradedClientState(request?: QueryUpgradedClientStateRequest): Promise<QueryUpgradedClientStateResponse>;
+  /** UpgradedConsensusState queries an Upgraded IBC consensus state. */
+
+  upgradedConsensusState(request?: QueryUpgradedConsensusStateRequest): Promise<QueryUpgradedConsensusStateResponse>;
+}
+export class QueryClientImpl implements Query {
+  private readonly rpc: Rpc;
+
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.clientState = this.clientState.bind(this);
+    this.clientStates = this.clientStates.bind(this);
+    this.consensusState = this.consensusState.bind(this);
+    this.consensusStates = this.consensusStates.bind(this);
+    this.clientStatus = this.clientStatus.bind(this);
+    this.clientParams = this.clientParams.bind(this);
+    this.upgradedClientState = this.upgradedClientState.bind(this);
+    this.upgradedConsensusState = this.upgradedConsensusState.bind(this);
+  }
+
+  clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse> {
+    const data = QueryClientStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ClientState", data);
+    return promise.then(data => QueryClientStateResponse.decode(new _m0.Reader(data)));
+  }
+
+  clientStates(request: QueryClientStatesRequest = {
+    pagination: undefined
+  }): Promise<QueryClientStatesResponse> {
+    const data = QueryClientStatesRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ClientStates", data);
+    return promise.then(data => QueryClientStatesResponse.decode(new _m0.Reader(data)));
+  }
+
+  consensusState(request: QueryConsensusStateRequest): Promise<QueryConsensusStateResponse> {
+    const data = QueryConsensusStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ConsensusState", data);
+    return promise.then(data => QueryConsensusStateResponse.decode(new _m0.Reader(data)));
+  }
+
+  consensusStates(request: QueryConsensusStatesRequest): Promise<QueryConsensusStatesResponse> {
+    const data = QueryConsensusStatesRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ConsensusStates", data);
+    return promise.then(data => QueryConsensusStatesResponse.decode(new _m0.Reader(data)));
+  }
+
+  clientStatus(request: QueryClientStatusRequest): Promise<QueryClientStatusResponse> {
+    const data = QueryClientStatusRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ClientStatus", data);
+    return promise.then(data => QueryClientStatusResponse.decode(new _m0.Reader(data)));
+  }
+
+  clientParams(request: QueryClientParamsRequest = {}): Promise<QueryClientParamsResponse> {
+    const data = QueryClientParamsRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "ClientParams", data);
+    return promise.then(data => QueryClientParamsResponse.decode(new _m0.Reader(data)));
+  }
+
+  upgradedClientState(request: QueryUpgradedClientStateRequest = {}): Promise<QueryUpgradedClientStateResponse> {
+    const data = QueryUpgradedClientStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "UpgradedClientState", data);
+    return promise.then(data => QueryUpgradedClientStateResponse.decode(new _m0.Reader(data)));
+  }
+
+  upgradedConsensusState(request: QueryUpgradedConsensusStateRequest = {}): Promise<QueryUpgradedConsensusStateResponse> {
+    const data = QueryUpgradedConsensusStateRequest.encode(request).finish();
+    const promise = this.rpc.request("ibc.core.client.v1.Query", "UpgradedConsensusState", data);
+    return promise.then(data => QueryUpgradedConsensusStateResponse.decode(new _m0.Reader(data)));
+  }
+
+}
+export const createRpcQueryExtension = (base: QueryClient) => {
+  const rpc = createProtobufRpcClient(base);
+  const queryService = new QueryClientImpl(rpc);
+  return {
+    clientState(request: QueryClientStateRequest): Promise<QueryClientStateResponse> {
+      return queryService.clientState(request);
}, + + clientStates(request?: QueryClientStatesRequest): Promise { + return queryService.clientStates(request); + }, + + consensusState(request: QueryConsensusStateRequest): Promise { + return queryService.consensusState(request); + }, + + consensusStates(request: QueryConsensusStatesRequest): Promise { + return queryService.consensusStates(request); + }, + + clientStatus(request: QueryClientStatusRequest): Promise { + return queryService.clientStatus(request); + }, + + clientParams(request?: QueryClientParamsRequest): Promise { + return queryService.clientParams(request); + }, + + upgradedClientState(request?: QueryUpgradedClientStateRequest): Promise { + return queryService.upgradedClientState(request); + }, + + upgradedConsensusState(request?: QueryUpgradedConsensusStateRequest): Promise { + return queryService.upgradedConsensusState(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/client/v1/query.ts b/packages/codegen/src/ibc/core/client/v1/query.ts new file mode 100644 index 00000000..b667aed9 --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/query.ts @@ -0,0 +1,1095 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType, ConsensusStateWithHeight, ConsensusStateWithHeightSDKType, Params, ParamsSDKType } from "./client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * QueryClientStateRequest is the request type for the Query/ClientState RPC + * method + */ + +export interface QueryClientStateRequest { + /** client state unique identifier */ + clientId: string; +} +/** + * QueryClientStateRequest is the request type for the Query/ClientState RPC + * method + */ + +export interface QueryClientStateRequestSDKType { + client_id: string; +} +/** + * QueryClientStateResponse is the response type for the Query/ClientState RPC + * method. Besides the client state, it includes a proof and the height from + * which the proof was retrieved. + */ + +export interface QueryClientStateResponse { + /** client state associated with the request identifier */ + clientState?: Any; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryClientStateResponse is the response type for the Query/ClientState RPC + * method. Besides the client state, it includes a proof and the height from + * which the proof was retrieved. + */ + +export interface QueryClientStateResponseSDKType { + client_state?: AnySDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryClientStatesRequest is the request type for the Query/ClientStates RPC + * method + */ + +export interface QueryClientStatesRequest { + /** pagination request */ + pagination?: PageRequest; +} +/** + * QueryClientStatesRequest is the request type for the Query/ClientStates RPC + * method + */ + +export interface QueryClientStatesRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryClientStatesResponse is the response type for the Query/ClientStates RPC + * method. + */ + +export interface QueryClientStatesResponse { + /** list of stored ClientStates of the chain. 
*/ + clientStates: IdentifiedClientState[]; + /** pagination response */ + + pagination?: PageResponse; +} +/** + * QueryClientStatesResponse is the response type for the Query/ClientStates RPC + * method. + */ + +export interface QueryClientStatesResponseSDKType { + client_states: IdentifiedClientStateSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryConsensusStateRequest is the request type for the Query/ConsensusState + * RPC method. Besides the consensus state, it includes a proof and the height + * from which the proof was retrieved. + */ + +export interface QueryConsensusStateRequest { + /** client identifier */ + clientId: string; + /** consensus state revision number */ + + revisionNumber: Long; + /** consensus state revision height */ + + revisionHeight: Long; + /** + * latest_height overrrides the height field and queries the latest stored + * ConsensusState + */ + + latestHeight: boolean; +} +/** + * QueryConsensusStateRequest is the request type for the Query/ConsensusState + * RPC method. Besides the consensus state, it includes a proof and the height + * from which the proof was retrieved. + */ + +export interface QueryConsensusStateRequestSDKType { + client_id: string; + revision_number: Long; + revision_height: Long; + latest_height: boolean; +} +/** + * QueryConsensusStateResponse is the response type for the Query/ConsensusState + * RPC method + */ + +export interface QueryConsensusStateResponse { + /** consensus state associated with the client identifier at the given height */ + consensusState?: Any; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryConsensusStateResponse is the response type for the Query/ConsensusState + * RPC method + */ + +export interface QueryConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConsensusStatesRequest is the request type for the Query/ConsensusStates + * RPC method. + */ + +export interface QueryConsensusStatesRequest { + /** client identifier */ + clientId: string; + /** pagination request */ + + pagination?: PageRequest; +} +/** + * QueryConsensusStatesRequest is the request type for the Query/ConsensusStates + * RPC method. + */ + +export interface QueryConsensusStatesRequestSDKType { + client_id: string; + pagination?: PageRequestSDKType; +} +/** + * QueryConsensusStatesResponse is the response type for the + * Query/ConsensusStates RPC method + */ + +export interface QueryConsensusStatesResponse { + /** consensus states associated with the identifier */ + consensusStates: ConsensusStateWithHeight[]; + /** pagination response */ + + pagination?: PageResponse; +} +/** + * QueryConsensusStatesResponse is the response type for the + * Query/ConsensusStates RPC method + */ + +export interface QueryConsensusStatesResponseSDKType { + consensus_states: ConsensusStateWithHeightSDKType[]; + pagination?: PageResponseSDKType; +} +/** + * QueryClientStatusRequest is the request type for the Query/ClientStatus RPC + * method + */ + +export interface QueryClientStatusRequest { + /** client unique identifier */ + clientId: string; +} +/** + * QueryClientStatusRequest is the request type for the Query/ClientStatus RPC + * method + */ + +export interface QueryClientStatusRequestSDKType { + client_id: string; +} +/** + * QueryClientStatusResponse is the response type for the Query/ClientStatus RPC + * method. 
It returns the current status of the IBC client. + */ + +export interface QueryClientStatusResponse { + status: string; +} +/** + * QueryClientStatusResponse is the response type for the Query/ClientStatus RPC + * method. It returns the current status of the IBC client. + */ + +export interface QueryClientStatusResponseSDKType { + status: string; +} +/** + * QueryClientParamsRequest is the request type for the Query/ClientParams RPC + * method. + */ + +export interface QueryClientParamsRequest {} +/** + * QueryClientParamsRequest is the request type for the Query/ClientParams RPC + * method. + */ + +export interface QueryClientParamsRequestSDKType {} +/** + * QueryClientParamsResponse is the response type for the Query/ClientParams RPC + * method. + */ + +export interface QueryClientParamsResponse { + /** params defines the parameters of the module. */ + params?: Params; +} +/** + * QueryClientParamsResponse is the response type for the Query/ClientParams RPC + * method. + */ + +export interface QueryClientParamsResponseSDKType { + params?: ParamsSDKType; +} +/** + * QueryUpgradedClientStateRequest is the request type for the + * Query/UpgradedClientState RPC method + */ + +export interface QueryUpgradedClientStateRequest {} +/** + * QueryUpgradedClientStateRequest is the request type for the + * Query/UpgradedClientState RPC method + */ + +export interface QueryUpgradedClientStateRequestSDKType {} +/** + * QueryUpgradedClientStateResponse is the response type for the + * Query/UpgradedClientState RPC method. + */ + +export interface QueryUpgradedClientStateResponse { + /** client state associated with the request identifier */ + upgradedClientState?: Any; +} +/** + * QueryUpgradedClientStateResponse is the response type for the + * Query/UpgradedClientState RPC method. + */ + +export interface QueryUpgradedClientStateResponseSDKType { + upgraded_client_state?: AnySDKType; +} +/** + * QueryUpgradedConsensusStateRequest is the request type for the + * Query/UpgradedConsensusState RPC method + */ + +export interface QueryUpgradedConsensusStateRequest {} +/** + * QueryUpgradedConsensusStateRequest is the request type for the + * Query/UpgradedConsensusState RPC method + */ + +export interface QueryUpgradedConsensusStateRequestSDKType {} +/** + * QueryUpgradedConsensusStateResponse is the response type for the + * Query/UpgradedConsensusState RPC method. + */ + +export interface QueryUpgradedConsensusStateResponse { + /** Consensus state associated with the request identifier */ + upgradedConsensusState?: Any; +} +/** + * QueryUpgradedConsensusStateResponse is the response type for the + * Query/UpgradedConsensusState RPC method. + */ + +export interface QueryUpgradedConsensusStateResponseSDKType { + upgraded_consensus_state?: AnySDKType; +} + +function createBaseQueryClientStateRequest(): QueryClientStateRequest { + return { + clientId: "" + }; +} + +export const QueryClientStateRequest = { + encode(message: QueryClientStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryClientStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStateRequest { + const message = createBaseQueryClientStateRequest(); + message.clientId = object.clientId ?? ""; + return message; + } + +}; + +function createBaseQueryClientStateResponse(): QueryClientStateResponse { + return { + clientState: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryClientStateResponse = { + encode(message: QueryClientStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStateResponse { + const message = createBaseQueryClientStateResponse(); + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryClientStatesRequest(): QueryClientStatesRequest { + return { + pagination: undefined + }; +} + +export const QueryClientStatesRequest = { + encode(message: QueryClientStatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientStatesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStatesRequest { + const message = createBaseQueryClientStatesRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryClientStatesResponse(): QueryClientStatesResponse { + return { + clientStates: [], + pagination: undefined + }; +} + +export const QueryClientStatesResponse = { + encode(message: QueryClientStatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.clientStates) { + IdentifiedClientState.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientStatesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientStates.push(IdentifiedClientState.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStatesResponse { + const message = createBaseQueryClientStatesResponse(); + message.clientStates = object.clientStates?.map(e => IdentifiedClientState.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryConsensusStateRequest(): QueryConsensusStateRequest { + return { + clientId: "", + revisionNumber: Long.UZERO, + revisionHeight: Long.UZERO, + latestHeight: false + }; +} + +export const QueryConsensusStateRequest = { + encode(message: QueryConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (!message.revisionNumber.isZero()) { + writer.uint32(16).uint64(message.revisionNumber); + } + + if (!message.revisionHeight.isZero()) { + writer.uint32(24).uint64(message.revisionHeight); + } + + if (message.latestHeight === true) { + writer.uint32(32).bool(message.latestHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConsensusStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.revisionNumber = (reader.uint64() as Long); + break; + + case 3: + message.revisionHeight = (reader.uint64() as Long); + break; + + case 4: + message.latestHeight = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConsensusStateRequest { + const message = createBaseQueryConsensusStateRequest(); + message.clientId = object.clientId ?? ""; + message.revisionNumber = object.revisionNumber !== undefined && object.revisionNumber !== null ? 
Long.fromValue(object.revisionNumber) : Long.UZERO; + message.revisionHeight = object.revisionHeight !== undefined && object.revisionHeight !== null ? Long.fromValue(object.revisionHeight) : Long.UZERO; + message.latestHeight = object.latestHeight ?? false; + return message; + } + +}; + +function createBaseQueryConsensusStateResponse(): QueryConsensusStateResponse { + return { + consensusState: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryConsensusStateResponse = { + encode(message: QueryConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConsensusStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConsensusStateResponse { + const message = createBaseQueryConsensusStateResponse(); + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryConsensusStatesRequest(): QueryConsensusStatesRequest { + return { + clientId: "", + pagination: undefined + }; +} + +export const QueryConsensusStatesRequest = { + encode(message: QueryConsensusStatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConsensusStatesRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConsensusStatesRequest { + const message = createBaseQueryConsensusStatesRequest(); + message.clientId = object.clientId ?? 
""; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryConsensusStatesResponse(): QueryConsensusStatesResponse { + return { + consensusStates: [], + pagination: undefined + }; +} + +export const QueryConsensusStatesResponse = { + encode(message: QueryConsensusStatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.consensusStates) { + ConsensusStateWithHeight.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConsensusStatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConsensusStatesResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consensusStates.push(ConsensusStateWithHeight.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConsensusStatesResponse { + const message = createBaseQueryConsensusStatesResponse(); + message.consensusStates = object.consensusStates?.map(e => ConsensusStateWithHeight.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryClientStatusRequest(): QueryClientStatusRequest { + return { + clientId: "" + }; +} + +export const QueryClientStatusRequest = { + encode(message: QueryClientStatusRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatusRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientStatusRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStatusRequest { + const message = createBaseQueryClientStatusRequest(); + message.clientId = object.clientId ?? ""; + return message; + } + +}; + +function createBaseQueryClientStatusResponse(): QueryClientStatusResponse { + return { + status: "" + }; +} + +export const QueryClientStatusResponse = { + encode(message: QueryClientStatusResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== "") { + writer.uint32(10).string(message.status); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientStatusResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryClientStatusResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.status = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientStatusResponse { + const message = createBaseQueryClientStatusResponse(); + message.status = object.status ?? ""; + return message; + } + +}; + +function createBaseQueryClientParamsRequest(): QueryClientParamsRequest { + return {}; +} + +export const QueryClientParamsRequest = { + encode(_: QueryClientParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientParamsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryClientParamsRequest { + const message = createBaseQueryClientParamsRequest(); + return message; + } + +}; + +function createBaseQueryClientParamsResponse(): QueryClientParamsResponse { + return { + params: undefined + }; +} + +export const QueryClientParamsResponse = { + encode(message: QueryClientParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientParamsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientParamsResponse { + const message = createBaseQueryClientParamsResponse(); + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; + +function createBaseQueryUpgradedClientStateRequest(): QueryUpgradedClientStateRequest { + return {}; +} + +export const QueryUpgradedClientStateRequest = { + encode(_: QueryUpgradedClientStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedClientStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryUpgradedClientStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryUpgradedClientStateRequest { + const message = createBaseQueryUpgradedClientStateRequest(); + return message; + } + +}; + +function createBaseQueryUpgradedClientStateResponse(): QueryUpgradedClientStateResponse { + return { + upgradedClientState: undefined + }; +} + +export const QueryUpgradedClientStateResponse = { + encode(message: QueryUpgradedClientStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.upgradedClientState !== undefined) { + Any.encode(message.upgradedClientState, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedClientStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedClientStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryUpgradedClientStateResponse { + const message = createBaseQueryUpgradedClientStateResponse(); + message.upgradedClientState = object.upgradedClientState !== undefined && object.upgradedClientState !== null ? Any.fromPartial(object.upgradedClientState) : undefined; + return message; + } + +}; + +function createBaseQueryUpgradedConsensusStateRequest(): QueryUpgradedConsensusStateRequest { + return {}; +} + +export const QueryUpgradedConsensusStateRequest = { + encode(_: QueryUpgradedConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): QueryUpgradedConsensusStateRequest { + const message = createBaseQueryUpgradedConsensusStateRequest(); + return message; + } + +}; + +function createBaseQueryUpgradedConsensusStateResponse(): QueryUpgradedConsensusStateResponse { + return { + upgradedConsensusState: undefined + }; +} + +export const QueryUpgradedConsensusStateResponse = { + encode(message: QueryUpgradedConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.upgradedConsensusState !== undefined) { + Any.encode(message.upgradedConsensusState, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.upgradedConsensusState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial<QueryUpgradedConsensusStateResponse>): QueryUpgradedConsensusStateResponse { + const message = createBaseQueryUpgradedConsensusStateResponse(); + message.upgradedConsensusState = object.upgradedConsensusState !== undefined && object.upgradedConsensusState !== null ? Any.fromPartial(object.upgradedConsensusState) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/client/v1/tx.rpc.msg.ts b/packages/codegen/src/ibc/core/client/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..3197da3f --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/tx.rpc.msg.ts @@ -0,0 +1,54 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgCreateClient, MsgCreateClientResponse, MsgUpdateClient, MsgUpdateClientResponse, MsgUpgradeClient, MsgUpgradeClientResponse, MsgSubmitMisbehaviour, MsgSubmitMisbehaviourResponse } from "./tx"; +/** Msg defines the ibc/client Msg service. */ + +export interface Msg { + /** CreateClient defines a rpc handler method for MsgCreateClient. */ + createClient(request: MsgCreateClient): Promise<MsgCreateClientResponse>; + /** UpdateClient defines a rpc handler method for MsgUpdateClient. */ + + updateClient(request: MsgUpdateClient): Promise<MsgUpdateClientResponse>; + /** UpgradeClient defines a rpc handler method for MsgUpgradeClient. */ + + upgradeClient(request: MsgUpgradeClient): Promise<MsgUpgradeClientResponse>; + /** SubmitMisbehaviour defines a rpc handler method for MsgSubmitMisbehaviour. */ + + submitMisbehaviour(request: MsgSubmitMisbehaviour): Promise<MsgSubmitMisbehaviourResponse>; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.createClient = this.createClient.bind(this); + this.updateClient = this.updateClient.bind(this); + this.upgradeClient = this.upgradeClient.bind(this); + this.submitMisbehaviour = this.submitMisbehaviour.bind(this); + } + + createClient(request: MsgCreateClient): Promise<MsgCreateClientResponse> { + const data = MsgCreateClient.encode(request).finish(); + const promise = this.rpc.request("ibc.core.client.v1.Msg", "CreateClient", data); + return promise.then(data => MsgCreateClientResponse.decode(new _m0.Reader(data))); + } + + updateClient(request: MsgUpdateClient): Promise<MsgUpdateClientResponse> { + const data = MsgUpdateClient.encode(request).finish(); + const promise = this.rpc.request("ibc.core.client.v1.Msg", "UpdateClient", data); + return promise.then(data => MsgUpdateClientResponse.decode(new _m0.Reader(data))); + } + + upgradeClient(request: MsgUpgradeClient): Promise<MsgUpgradeClientResponse> { + const data = MsgUpgradeClient.encode(request).finish(); + const promise = this.rpc.request("ibc.core.client.v1.Msg", "UpgradeClient", data); + return promise.then(data => MsgUpgradeClientResponse.decode(new _m0.Reader(data))); + } + + submitMisbehaviour(request: MsgSubmitMisbehaviour): Promise<MsgSubmitMisbehaviourResponse> { + const data = MsgSubmitMisbehaviour.encode(request).finish(); + const promise = this.rpc.request("ibc.core.client.v1.Msg", "SubmitMisbehaviour", data); + return promise.then(data => MsgSubmitMisbehaviourResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file
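A sketch of driving the MsgClientImpl above. The Rpc implementation is a stand-in that resolves empty bytes purely so the example type-checks; a real transport must deliver the encoded request to the ibc.core.client.v1.Msg service and return the response bytes. The client id and signer are placeholders:

// Stand-in transport: a real Rpc forwards `data` to the chain and resolves
// with the protobuf-encoded response bytes.
const fakeRpc: Rpc = {
  request(_service: string, _method: string, _data: Uint8Array): Promise<Uint8Array> {
    return Promise.resolve(new Uint8Array()); // decodes to an empty MsgUpdateClientResponse
  }
};

async function updateClientExample(): Promise<MsgUpdateClientResponse> {
  const msgClient = new MsgClientImpl(fakeRpc);
  const request = MsgUpdateClient.fromPartial({
    clientId: "07-tendermint-0", // placeholder client identifier
    signer: "cosmos1..."         // placeholder signer address
  });
  // encode -> rpc.request("ibc.core.client.v1.Msg", "UpdateClient", bytes) -> decode
  return msgClient.updateClient(request);
}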
diff --git a/packages/codegen/src/ibc/core/client/v1/tx.ts b/packages/codegen/src/ibc/core/client/v1/tx.ts new file mode 100644 index 00000000..5eba0744 --- /dev/null +++ b/packages/codegen/src/ibc/core/client/v1/tx.ts @@ -0,0 +1,571 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** MsgCreateClient defines a message to create an IBC client */ + +export interface MsgCreateClient { + /** light client state */ + clientState?: Any; + /** + * consensus state associated with the client that corresponds to a given + * height. + */ + + consensusState?: Any; + /** signer address */ + + signer: string; +} +/** MsgCreateClient defines a message to create an IBC client */ + +export interface MsgCreateClientSDKType { + client_state?: AnySDKType; + consensus_state?: AnySDKType; + signer: string; +} +/** MsgCreateClientResponse defines the Msg/CreateClient response type. */ + +export interface MsgCreateClientResponse {} +/** MsgCreateClientResponse defines the Msg/CreateClient response type. */ + +export interface MsgCreateClientResponseSDKType {} +/** + * MsgUpdateClient defines an sdk.Msg to update a IBC client state using + * the given header. + */ + +export interface MsgUpdateClient { + /** client unique identifier */ + clientId: string; + /** header to update the light client */ + + header?: Any; + /** signer address */ + + signer: string; +} +/** + * MsgUpdateClient defines an sdk.Msg to update a IBC client state using + * the given header. + */ + +export interface MsgUpdateClientSDKType { + client_id: string; + header?: AnySDKType; + signer: string; +} +/** MsgUpdateClientResponse defines the Msg/UpdateClient response type. */ + +export interface MsgUpdateClientResponse {} +/** MsgUpdateClientResponse defines the Msg/UpdateClient response type.
*/ + +export interface MsgUpdateClientResponseSDKType {} +/** + * MsgUpgradeClient defines an sdk.Msg to upgrade an IBC client to a new client + * state + */ + +export interface MsgUpgradeClient { + /** client unique identifier */ + clientId: string; + /** upgraded client state */ + + clientState?: Any; + /** + * upgraded consensus state, only contains enough information to serve as a + * basis of trust in update logic + */ + + consensusState?: Any; + /** proof that old chain committed to new client */ + + proofUpgradeClient: Uint8Array; + /** proof that old chain committed to new consensus state */ + + proofUpgradeConsensusState: Uint8Array; + /** signer address */ + + signer: string; +} +/** + * MsgUpgradeClient defines an sdk.Msg to upgrade an IBC client to a new client + * state + */ + +export interface MsgUpgradeClientSDKType { + client_id: string; + client_state?: AnySDKType; + consensus_state?: AnySDKType; + proof_upgrade_client: Uint8Array; + proof_upgrade_consensus_state: Uint8Array; + signer: string; +} +/** MsgUpgradeClientResponse defines the Msg/UpgradeClient response type. */ + +export interface MsgUpgradeClientResponse {} +/** MsgUpgradeClientResponse defines the Msg/UpgradeClient response type. */ + +export interface MsgUpgradeClientResponseSDKType {} +/** + * MsgSubmitMisbehaviour defines an sdk.Msg type that submits Evidence for + * light client misbehaviour. + */ + +export interface MsgSubmitMisbehaviour { + /** client unique identifier */ + clientId: string; + /** misbehaviour used for freezing the light client */ + + misbehaviour?: Any; + /** signer address */ + + signer: string; +} +/** + * MsgSubmitMisbehaviour defines an sdk.Msg type that submits Evidence for + * light client misbehaviour. + */ + +export interface MsgSubmitMisbehaviourSDKType { + client_id: string; + misbehaviour?: AnySDKType; + signer: string; +} +/** + * MsgSubmitMisbehaviourResponse defines the Msg/SubmitMisbehaviour response + * type. + */ + +export interface MsgSubmitMisbehaviourResponse {} +/** + * MsgSubmitMisbehaviourResponse defines the Msg/SubmitMisbehaviour response + * type. + */ + +export interface MsgSubmitMisbehaviourResponseSDKType {} + +function createBaseMsgCreateClient(): MsgCreateClient { + return { + clientState: undefined, + consensusState: undefined, + signer: "" + }; +} + +export const MsgCreateClient = { + encode(message: MsgCreateClient, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(10).fork()).ldelim(); + } + + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(18).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(26).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateClient { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateClient(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgCreateClient { + const message = createBaseMsgCreateClient(); + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgCreateClientResponse(): MsgCreateClientResponse { + return {}; +} + +export const MsgCreateClientResponse = { + encode(_: MsgCreateClientResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateClientResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateClientResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgCreateClientResponse { + const message = createBaseMsgCreateClientResponse(); + return message; + } + +}; + +function createBaseMsgUpdateClient(): MsgUpdateClient { + return { + clientId: "", + header: undefined, + signer: "" + }; +} + +export const MsgUpdateClient = { + encode(message: MsgUpdateClient, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.header !== undefined) { + Any.encode(message.header, writer.uint32(18).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(26).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateClient { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateClient(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.header = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpdateClient { + const message = createBaseMsgUpdateClient(); + message.clientId = object.clientId ?? ""; + message.header = object.header !== undefined && object.header !== null ? Any.fromPartial(object.header) : undefined; + message.signer = object.signer ?? 
""; + return message; + } + +}; + +function createBaseMsgUpdateClientResponse(): MsgUpdateClientResponse { + return {}; +} + +export const MsgUpdateClientResponse = { + encode(_: MsgUpdateClientResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateClientResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateClientResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpdateClientResponse { + const message = createBaseMsgUpdateClientResponse(); + return message; + } + +}; + +function createBaseMsgUpgradeClient(): MsgUpgradeClient { + return { + clientId: "", + clientState: undefined, + consensusState: undefined, + proofUpgradeClient: new Uint8Array(), + proofUpgradeConsensusState: new Uint8Array(), + signer: "" + }; +} + +export const MsgUpgradeClient = { + encode(message: MsgUpgradeClient, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(18).fork()).ldelim(); + } + + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(26).fork()).ldelim(); + } + + if (message.proofUpgradeClient.length !== 0) { + writer.uint32(34).bytes(message.proofUpgradeClient); + } + + if (message.proofUpgradeConsensusState.length !== 0) { + writer.uint32(42).bytes(message.proofUpgradeConsensusState); + } + + if (message.signer !== "") { + writer.uint32(50).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeClient { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpgradeClient(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + case 4: + message.proofUpgradeClient = reader.bytes(); + break; + + case 5: + message.proofUpgradeConsensusState = reader.bytes(); + break; + + case 6: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgUpgradeClient { + const message = createBaseMsgUpgradeClient(); + message.clientId = object.clientId ?? ""; + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + message.proofUpgradeClient = object.proofUpgradeClient ?? new Uint8Array(); + message.proofUpgradeConsensusState = object.proofUpgradeConsensusState ?? new Uint8Array(); + message.signer = object.signer ?? 
""; + return message; + } + +}; + +function createBaseMsgUpgradeClientResponse(): MsgUpgradeClientResponse { + return {}; +} + +export const MsgUpgradeClientResponse = { + encode(_: MsgUpgradeClientResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpgradeClientResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpgradeClientResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgUpgradeClientResponse { + const message = createBaseMsgUpgradeClientResponse(); + return message; + } + +}; + +function createBaseMsgSubmitMisbehaviour(): MsgSubmitMisbehaviour { + return { + clientId: "", + misbehaviour: undefined, + signer: "" + }; +} + +export const MsgSubmitMisbehaviour = { + encode(message: MsgSubmitMisbehaviour, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.misbehaviour !== undefined) { + Any.encode(message.misbehaviour, writer.uint32(18).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(26).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitMisbehaviour { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSubmitMisbehaviour(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.misbehaviour = Any.decode(reader, reader.uint32()); + break; + + case 3: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgSubmitMisbehaviour { + const message = createBaseMsgSubmitMisbehaviour(); + message.clientId = object.clientId ?? ""; + message.misbehaviour = object.misbehaviour !== undefined && object.misbehaviour !== null ? Any.fromPartial(object.misbehaviour) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgSubmitMisbehaviourResponse(): MsgSubmitMisbehaviourResponse { + return {}; +} + +export const MsgSubmitMisbehaviourResponse = { + encode(_: MsgSubmitMisbehaviourResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSubmitMisbehaviourResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgSubmitMisbehaviourResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgSubmitMisbehaviourResponse { + const message = createBaseMsgSubmitMisbehaviourResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/commitment/v1/commitment.ts b/packages/codegen/src/ibc/core/commitment/v1/commitment.ts new file mode 100644 index 00000000..72d8725d --- /dev/null +++ b/packages/codegen/src/ibc/core/commitment/v1/commitment.ts @@ -0,0 +1,257 @@ +import { CommitmentProof, CommitmentProofSDKType } from "../../../../confio/proofs"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * MerkleRoot defines a merkle root hash. + * In the Cosmos SDK, the AppHash of a block header becomes the root. + */ + +export interface MerkleRoot { + hash: Uint8Array; +} +/** + * MerkleRoot defines a merkle root hash. + * In the Cosmos SDK, the AppHash of a block header becomes the root. + */ + +export interface MerkleRootSDKType { + hash: Uint8Array; +} +/** + * MerklePrefix is merkle path prefixed to the key. + * The constructed key from the Path and the key will be append(Path.KeyPath, + * append(Path.KeyPrefix, key...)) + */ + +export interface MerklePrefix { + keyPrefix: Uint8Array; +} +/** + * MerklePrefix is merkle path prefixed to the key. + * The constructed key from the Path and the key will be append(Path.KeyPath, + * append(Path.KeyPrefix, key...)) + */ + +export interface MerklePrefixSDKType { + key_prefix: Uint8Array; +} +/** + * MerklePath is the path used to verify commitment proofs, which can be an + * arbitrary structured object (defined by a commitment type). + * MerklePath is represented from root-to-leaf + */ + +export interface MerklePath { + keyPath: string[]; +} +/** + * MerklePath is the path used to verify commitment proofs, which can be an + * arbitrary structured object (defined by a commitment type). + * MerklePath is represented from root-to-leaf + */ + +export interface MerklePathSDKType { + key_path: string[]; +} +/** + * MerkleProof is a wrapper type over a chain of CommitmentProofs. + * It demonstrates membership or non-membership for an element or set of + * elements, verifiable in conjunction with a known commitment root. Proofs + * should be succinct. + * MerkleProofs are ordered from leaf-to-root + */ + +export interface MerkleProof { + proofs: CommitmentProof[]; +} +/** + * MerkleProof is a wrapper type over a chain of CommitmentProofs. + * It demonstrates membership or non-membership for an element or set of + * elements, verifiable in conjunction with a known commitment root. Proofs + * should be succinct. + * MerkleProofs are ordered from leaf-to-root + */ + +export interface MerkleProofSDKType { + proofs: CommitmentProofSDKType[]; +} + +function createBaseMerkleRoot(): MerkleRoot { + return { + hash: new Uint8Array() + }; +} + +export const MerkleRoot = { + encode(message: MerkleRoot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MerkleRoot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMerkleRoot(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MerkleRoot { + const message = createBaseMerkleRoot(); + message.hash = object.hash ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMerklePrefix(): MerklePrefix { + return { + keyPrefix: new Uint8Array() + }; +} + +export const MerklePrefix = { + encode(message: MerklePrefix, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.keyPrefix.length !== 0) { + writer.uint32(10).bytes(message.keyPrefix); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MerklePrefix { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMerklePrefix(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.keyPrefix = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MerklePrefix { + const message = createBaseMerklePrefix(); + message.keyPrefix = object.keyPrefix ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMerklePath(): MerklePath { + return { + keyPath: [] + }; +} + +export const MerklePath = { + encode(message: MerklePath, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.keyPath) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MerklePath { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMerklePath(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.keyPath.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MerklePath { + const message = createBaseMerklePath(); + message.keyPath = object.keyPath?.map(e => e) || []; + return message; + } + +}; + +function createBaseMerkleProof(): MerkleProof { + return { + proofs: [] + }; +} + +export const MerkleProof = { + encode(message: MerkleProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proofs) { + CommitmentProof.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MerkleProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMerkleProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.proofs.push(CommitmentProof.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MerkleProof { + const message = createBaseMerkleProof(); + message.proofs = object.proofs?.map(e => CommitmentProof.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/connection.ts b/packages/codegen/src/ibc/core/connection/v1/connection.ts new file mode 100644 index 00000000..1c834d59 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/connection.ts @@ -0,0 +1,685 @@ +import { MerklePrefix, MerklePrefixSDKType } from "../../commitment/v1/commitment"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * State defines if a connection is in one of the following states: + * INIT, TRYOPEN, OPEN or UNINITIALIZED. + */ + +export enum State { + /** STATE_UNINITIALIZED_UNSPECIFIED - Default State */ + STATE_UNINITIALIZED_UNSPECIFIED = 0, + + /** STATE_INIT - A connection end has just started the opening handshake. */ + STATE_INIT = 1, + + /** + * STATE_TRYOPEN - A connection end has acknowledged the handshake step on the counterparty + * chain. + */ + STATE_TRYOPEN = 2, + + /** STATE_OPEN - A connection end has completed the handshake. */ + STATE_OPEN = 3, + UNRECOGNIZED = -1, +} +export const StateSDKType = State; +export function stateFromJSON(object: any): State { + switch (object) { + case 0: + case "STATE_UNINITIALIZED_UNSPECIFIED": + return State.STATE_UNINITIALIZED_UNSPECIFIED; + + case 1: + case "STATE_INIT": + return State.STATE_INIT; + + case 2: + case "STATE_TRYOPEN": + return State.STATE_TRYOPEN; + + case 3: + case "STATE_OPEN": + return State.STATE_OPEN; + + case -1: + case "UNRECOGNIZED": + default: + return State.UNRECOGNIZED; + } +} +export function stateToJSON(object: State): string { + switch (object) { + case State.STATE_UNINITIALIZED_UNSPECIFIED: + return "STATE_UNINITIALIZED_UNSPECIFIED"; + + case State.STATE_INIT: + return "STATE_INIT"; + + case State.STATE_TRYOPEN: + return "STATE_TRYOPEN"; + + case State.STATE_OPEN: + return "STATE_OPEN"; + + case State.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * ConnectionEnd defines a stateful object on a chain connected to another + * separate one. + * NOTE: there must only be 2 defined ConnectionEnds to establish + * a connection between two chains. + */ + +export interface ConnectionEnd { + /** client associated with this connection. */ + clientId: string; + /** + * IBC version which can be utilised to determine encodings or protocols for + * channels or packets utilising this connection. + */ + + versions: Version[]; + /** current state of the connection end. */ + + state: State; + /** counterparty chain associated with this connection. */ + + counterparty?: Counterparty; + /** + * delay period that must pass before a consensus state can be used for + * packet-verification NOTE: delay period logic is only implemented by some + * clients. + */ + + delayPeriod: Long; +} +/** + * ConnectionEnd defines a stateful object on a chain connected to another + * separate one. + * NOTE: there must only be 2 defined ConnectionEnds to establish + * a connection between two chains. 
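A small illustration of the State helpers defined above; the mappings follow directly from the generated switch statements:

const open = stateFromJSON("STATE_OPEN");        // State.STATE_OPEN (numeric value 3)
const label = stateToJSON(State.STATE_TRYOPEN);  // "STATE_TRYOPEN"
const fallback = stateFromJSON("not-a-state");   // State.UNRECOGNIZED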
+ */ + +export interface ConnectionEndSDKType { + client_id: string; + versions: VersionSDKType[]; + state: State; + counterparty?: CounterpartySDKType; + delay_period: Long; +} +/** + * IdentifiedConnection defines a connection with additional connection + * identifier field. + */ + +export interface IdentifiedConnection { + /** connection identifier. */ + id: string; + /** client associated with this connection. */ + + clientId: string; + /** + * IBC version which can be utilised to determine encodings or protocols for + * channels or packets utilising this connection + */ + + versions: Version[]; + /** current state of the connection end. */ + + state: State; + /** counterparty chain associated with this connection. */ + + counterparty?: Counterparty; + /** delay period associated with this connection. */ + + delayPeriod: Long; +} +/** + * IdentifiedConnection defines a connection with additional connection + * identifier field. + */ + +export interface IdentifiedConnectionSDKType { + id: string; + client_id: string; + versions: VersionSDKType[]; + state: State; + counterparty?: CounterpartySDKType; + delay_period: Long; +} +/** Counterparty defines the counterparty chain associated with a connection end. */ + +export interface Counterparty { + /** + * identifies the client on the counterparty chain associated with a given + * connection. + */ + clientId: string; + /** + * identifies the connection end on the counterparty chain associated with a + * given connection. + */ + + connectionId: string; + /** commitment merkle prefix of the counterparty chain. */ + + prefix?: MerklePrefix; +} +/** Counterparty defines the counterparty chain associated with a connection end. */ + +export interface CounterpartySDKType { + client_id: string; + connection_id: string; + prefix?: MerklePrefixSDKType; +} +/** ClientPaths define all the connection paths for a client state. */ + +export interface ClientPaths { + /** list of connection paths */ + paths: string[]; +} +/** ClientPaths define all the connection paths for a client state. */ + +export interface ClientPathsSDKType { + paths: string[]; +} +/** ConnectionPaths define all the connection paths for a given client state. */ + +export interface ConnectionPaths { + /** client state unique identifier */ + clientId: string; + /** list of connection paths */ + + paths: string[]; +} +/** ConnectionPaths define all the connection paths for a given client state. */ + +export interface ConnectionPathsSDKType { + client_id: string; + paths: string[]; +} +/** + * Version defines the versioning scheme used to negotiate the IBC verison in + * the connection handshake. + */ + +export interface Version { + /** unique version identifier */ + identifier: string; + /** list of features compatible with the specified identifier */ + + features: string[]; +} +/** + * Version defines the versioning scheme used to negotiate the IBC verison in + * the connection handshake. + */ + +export interface VersionSDKType { + identifier: string; + features: string[]; +} +/** Params defines the set of Connection parameters. */ + +export interface Params { + /** + * maximum expected time per block (in nanoseconds), used to enforce block delay. This parameter should reflect the + * largest amount of time that the chain might reasonably take to produce the next block under normal operating + * conditions. A safe choice is 3-5x the expected time per block. + */ + maxExpectedTimePerBlock: Long; +} +/** Params defines the set of Connection parameters. 
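An illustrative ConnectionEnd built with the fromPartial helper defined below; every identifier, feature string and counterparty value here is example data rather than anything prescribed by the module:

const connectionEnd: ConnectionEnd = ConnectionEnd.fromPartial({
  clientId: "07-tendermint-0",               // example client on this chain
  versions: [{ identifier: "1", features: ["ORDER_ORDERED", "ORDER_UNORDERED"] }],
  state: State.STATE_INIT,                   // handshake has just started
  counterparty: {
    clientId: "07-tendermint-9",             // example counterparty client
    connectionId: "connection-3",
    prefix: { keyPrefix: new Uint8Array() }  // counterparty commitment prefix bytes
  },
  delayPeriod: Long.UZERO                    // no extra packet-verification delay
});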
*/ + +export interface ParamsSDKType { + max_expected_time_per_block: Long; +} + +function createBaseConnectionEnd(): ConnectionEnd { + return { + clientId: "", + versions: [], + state: 0, + counterparty: undefined, + delayPeriod: Long.UZERO + }; +} + +export const ConnectionEnd = { + encode(message: ConnectionEnd, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + for (const v of message.versions) { + Version.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.state !== 0) { + writer.uint32(24).int32(message.state); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(34).fork()).ldelim(); + } + + if (!message.delayPeriod.isZero()) { + writer.uint32(40).uint64(message.delayPeriod); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionEnd { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConnectionEnd(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.versions.push(Version.decode(reader, reader.uint32())); + break; + + case 3: + message.state = (reader.int32() as any); + break; + + case 4: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 5: + message.delayPeriod = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConnectionEnd { + const message = createBaseConnectionEnd(); + message.clientId = object.clientId ?? ""; + message.versions = object.versions?.map(e => Version.fromPartial(e)) || []; + message.state = object.state ?? 0; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.delayPeriod = object.delayPeriod !== undefined && object.delayPeriod !== null ? Long.fromValue(object.delayPeriod) : Long.UZERO; + return message; + } + +}; + +function createBaseIdentifiedConnection(): IdentifiedConnection { + return { + id: "", + clientId: "", + versions: [], + state: 0, + counterparty: undefined, + delayPeriod: Long.UZERO + }; +} + +export const IdentifiedConnection = { + encode(message: IdentifiedConnection, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + + if (message.clientId !== "") { + writer.uint32(18).string(message.clientId); + } + + for (const v of message.versions) { + Version.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (message.state !== 0) { + writer.uint32(32).int32(message.state); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(42).fork()).ldelim(); + } + + if (!message.delayPeriod.isZero()) { + writer.uint32(48).uint64(message.delayPeriod); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IdentifiedConnection { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseIdentifiedConnection(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.string(); + break; + + case 2: + message.clientId = reader.string(); + break; + + case 3: + message.versions.push(Version.decode(reader, reader.uint32())); + break; + + case 4: + message.state = (reader.int32() as any); + break; + + case 5: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 6: + message.delayPeriod = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): IdentifiedConnection { + const message = createBaseIdentifiedConnection(); + message.id = object.id ?? ""; + message.clientId = object.clientId ?? ""; + message.versions = object.versions?.map(e => Version.fromPartial(e)) || []; + message.state = object.state ?? 0; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.delayPeriod = object.delayPeriod !== undefined && object.delayPeriod !== null ? Long.fromValue(object.delayPeriod) : Long.UZERO; + return message; + } + +}; + +function createBaseCounterparty(): Counterparty { + return { + clientId: "", + connectionId: "", + prefix: undefined + }; +} + +export const Counterparty = { + encode(message: Counterparty, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.connectionId !== "") { + writer.uint32(18).string(message.connectionId); + } + + if (message.prefix !== undefined) { + MerklePrefix.encode(message.prefix, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Counterparty { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCounterparty(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.connectionId = reader.string(); + break; + + case 3: + message.prefix = MerklePrefix.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Counterparty { + const message = createBaseCounterparty(); + message.clientId = object.clientId ?? ""; + message.connectionId = object.connectionId ?? ""; + message.prefix = object.prefix !== undefined && object.prefix !== null ? MerklePrefix.fromPartial(object.prefix) : undefined; + return message; + } + +}; + +function createBaseClientPaths(): ClientPaths { + return { + paths: [] + }; +} + +export const ClientPaths = { + encode(message: ClientPaths, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.paths) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientPaths { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientPaths(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.paths.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientPaths { + const message = createBaseClientPaths(); + message.paths = object.paths?.map(e => e) || []; + return message; + } + +}; + +function createBaseConnectionPaths(): ConnectionPaths { + return { + clientId: "", + paths: [] + }; +} + +export const ConnectionPaths = { + encode(message: ConnectionPaths, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + for (const v of message.paths) { + writer.uint32(18).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionPaths { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConnectionPaths(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.paths.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConnectionPaths { + const message = createBaseConnectionPaths(); + message.clientId = object.clientId ?? ""; + message.paths = object.paths?.map(e => e) || []; + return message; + } + +}; + +function createBaseVersion(): Version { + return { + identifier: "", + features: [] + }; +} + +export const Version = { + encode(message: Version, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.identifier !== "") { + writer.uint32(10).string(message.identifier); + } + + for (const v of message.features) { + writer.uint32(18).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Version { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVersion(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.identifier = reader.string(); + break; + + case 2: + message.features.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Version { + const message = createBaseVersion(); + message.identifier = object.identifier ?? ""; + message.features = object.features?.map(e => e) || []; + return message; + } + +}; + +function createBaseParams(): Params { + return { + maxExpectedTimePerBlock: Long.UZERO + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.maxExpectedTimePerBlock.isZero()) { + writer.uint32(8).uint64(message.maxExpectedTimePerBlock); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxExpectedTimePerBlock = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Params { + const message = createBaseParams(); + message.maxExpectedTimePerBlock = object.maxExpectedTimePerBlock !== undefined && object.maxExpectedTimePerBlock !== null ? Long.fromValue(object.maxExpectedTimePerBlock) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/genesis.ts b/packages/codegen/src/ibc/core/connection/v1/genesis.ts new file mode 100644 index 00000000..eb9701e6 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/genesis.ts @@ -0,0 +1,96 @@ +import { IdentifiedConnection, IdentifiedConnectionSDKType, ConnectionPaths, ConnectionPathsSDKType, Params, ParamsSDKType } from "./connection"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** GenesisState defines the ibc connection submodule's genesis state. */ + +export interface GenesisState { + connections: IdentifiedConnection[]; + clientConnectionPaths: ConnectionPaths[]; + /** the sequence for the next generated connection identifier */ + + nextConnectionSequence: Long; + params?: Params; +} +/** GenesisState defines the ibc connection submodule's genesis state. */ + +export interface GenesisStateSDKType { + connections: IdentifiedConnectionSDKType[]; + client_connection_paths: ConnectionPathsSDKType[]; + next_connection_sequence: Long; + params?: ParamsSDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + connections: [], + clientConnectionPaths: [], + nextConnectionSequence: Long.UZERO, + params: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.connections) { + IdentifiedConnection.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.clientConnectionPaths) { + ConnectionPaths.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (!message.nextConnectionSequence.isZero()) { + writer.uint32(24).uint64(message.nextConnectionSequence); + } + + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
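
// Illustrative sketch (not generated output): the codecs above follow the pattern used throughout
// this package — build a message with `fromPartial`, serialize with `encode(...).finish()`, and parse
// with `decode`. A minimal round trip, assuming hypothetical field values:
//
//   const end = ConnectionEnd.fromPartial({ clientId: "07-tendermint-0", delayPeriod: Long.UZERO });
//   const bytes = ConnectionEnd.encode(end).finish();        // Uint8Array wire encoding
//   const roundTripped = ConnectionEnd.decode(bytes);        // roundTripped.clientId === "07-tendermint-0"
//
// Fields omitted from the partial object fall back to the defaults in createBaseConnectionEnd().
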
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connections.push(IdentifiedConnection.decode(reader, reader.uint32())); + break; + + case 2: + message.clientConnectionPaths.push(ConnectionPaths.decode(reader, reader.uint32())); + break; + + case 3: + message.nextConnectionSequence = (reader.uint64() as Long); + break; + + case 4: + message.params = Params.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.connections = object.connections?.map(e => IdentifiedConnection.fromPartial(e)) || []; + message.clientConnectionPaths = object.clientConnectionPaths?.map(e => ConnectionPaths.fromPartial(e)) || []; + message.nextConnectionSequence = object.nextConnectionSequence !== undefined && object.nextConnectionSequence !== null ? Long.fromValue(object.nextConnectionSequence) : Long.UZERO; + message.params = object.params !== undefined && object.params !== null ? Params.fromPartial(object.params) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/query.lcd.ts b/packages/codegen/src/ibc/core/connection/v1/query.lcd.ts new file mode 100644 index 00000000..3f979295 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/query.lcd.ts @@ -0,0 +1,68 @@ +import { setPaginationParams } from "../../../../helpers"; +import { LCDClient } from "@osmonauts/lcd"; +import { QueryConnectionRequest, QueryConnectionResponseSDKType, QueryConnectionsRequest, QueryConnectionsResponseSDKType, QueryClientConnectionsRequest, QueryClientConnectionsResponseSDKType, QueryConnectionClientStateRequest, QueryConnectionClientStateResponseSDKType, QueryConnectionConsensusStateRequest, QueryConnectionConsensusStateResponseSDKType } from "./query"; +export class LCDQueryClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.connection = this.connection.bind(this); + this.connections = this.connections.bind(this); + this.clientConnections = this.clientConnections.bind(this); + this.connectionClientState = this.connectionClientState.bind(this); + this.connectionConsensusState = this.connectionConsensusState.bind(this); + } + /* Connection queries an IBC connection end. */ + + + async connection(params: QueryConnectionRequest): Promise { + const endpoint = `ibc/core/connection/v1/connections/${params.connectionId}`; + return await this.req.get(endpoint); + } + /* Connections queries all the IBC connections of a chain. */ + + + async connections(params: QueryConnectionsRequest = { + pagination: undefined + }): Promise { + const options: any = { + params: {} + }; + + if (typeof params?.pagination !== "undefined") { + setPaginationParams(options, params.pagination); + } + + const endpoint = `ibc/core/connection/v1/connections`; + return await this.req.get(endpoint, options); + } + /* ClientConnections queries the connection paths associated with a client + state. */ + + + async clientConnections(params: QueryClientConnectionsRequest): Promise { + const endpoint = `ibc/core/connection/v1/client_connections/${params.clientId}`; + return await this.req.get(endpoint); + } + /* ConnectionClientState queries the client state associated with the + connection. 
*/ + + + async connectionClientState(params: QueryConnectionClientStateRequest): Promise { + const endpoint = `ibc/core/connection/v1/connections/${params.connectionId}/client_state`; + return await this.req.get(endpoint); + } + /* ConnectionConsensusState queries the consensus state associated with the + connection. */ + + + async connectionConsensusState(params: QueryConnectionConsensusStateRequest): Promise { + const endpoint = `ibc/core/connection/v1/connections/${params.connectionId}/consensus_state/revision/${params.revisionNumber}/height/${params.revisionHeight}`; + return await this.req.get(endpoint); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/query.rpc.Query.ts b/packages/codegen/src/ibc/core/connection/v1/query.rpc.Query.ts new file mode 100644 index 00000000..e3af26e1 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/query.rpc.Query.ts @@ -0,0 +1,102 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryConnectionRequest, QueryConnectionResponse, QueryConnectionsRequest, QueryConnectionsResponse, QueryClientConnectionsRequest, QueryClientConnectionsResponse, QueryConnectionClientStateRequest, QueryConnectionClientStateResponse, QueryConnectionConsensusStateRequest, QueryConnectionConsensusStateResponse } from "./query"; +/** Query provides defines the gRPC querier service */ + +export interface Query { + /** Connection queries an IBC connection end. */ + connection(request: QueryConnectionRequest): Promise; + /** Connections queries all the IBC connections of a chain. */ + + connections(request?: QueryConnectionsRequest): Promise; + /** + * ClientConnections queries the connection paths associated with a client + * state. + */ + + clientConnections(request: QueryClientConnectionsRequest): Promise; + /** + * ConnectionClientState queries the client state associated with the + * connection. + */ + + connectionClientState(request: QueryConnectionClientStateRequest): Promise; + /** + * ConnectionConsensusState queries the consensus state associated with the + * connection. 
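
// Illustrative sketch (not generated output): a hedged example of using the LCDQueryClient above.
// `requestClient` is assumed to be an already-constructed LCDClient from "@osmonauts/lcd" pointed at a
// chain's REST endpoint, and the connection id is hypothetical:
//
//   const lcd = new LCDQueryClient({ requestClient });
//   const res = await lcd.connection({ connectionId: "connection-0" });
//   // res is a QueryConnectionResponseSDKType, so its fields use snake_case (e.g. res.proof_height)
//
// The paginated `connections()` call accepts an optional PageRequest, which is translated into query
// string parameters via setPaginationParams.
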
+ */ + + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.connection = this.connection.bind(this); + this.connections = this.connections.bind(this); + this.clientConnections = this.clientConnections.bind(this); + this.connectionClientState = this.connectionClientState.bind(this); + this.connectionConsensusState = this.connectionConsensusState.bind(this); + } + + connection(request: QueryConnectionRequest): Promise { + const data = QueryConnectionRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Query", "Connection", data); + return promise.then(data => QueryConnectionResponse.decode(new _m0.Reader(data))); + } + + connections(request: QueryConnectionsRequest = { + pagination: undefined + }): Promise { + const data = QueryConnectionsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Query", "Connections", data); + return promise.then(data => QueryConnectionsResponse.decode(new _m0.Reader(data))); + } + + clientConnections(request: QueryClientConnectionsRequest): Promise { + const data = QueryClientConnectionsRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Query", "ClientConnections", data); + return promise.then(data => QueryClientConnectionsResponse.decode(new _m0.Reader(data))); + } + + connectionClientState(request: QueryConnectionClientStateRequest): Promise { + const data = QueryConnectionClientStateRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Query", "ConnectionClientState", data); + return promise.then(data => QueryConnectionClientStateResponse.decode(new _m0.Reader(data))); + } + + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise { + const data = QueryConnectionConsensusStateRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Query", "ConnectionConsensusState", data); + return promise.then(data => QueryConnectionConsensusStateResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + connection(request: QueryConnectionRequest): Promise { + return queryService.connection(request); + }, + + connections(request?: QueryConnectionsRequest): Promise { + return queryService.connections(request); + }, + + clientConnections(request: QueryClientConnectionsRequest): Promise { + return queryService.clientConnections(request); + }, + + connectionClientState(request: QueryConnectionClientStateRequest): Promise { + return queryService.connectionClientState(request); + }, + + connectionConsensusState(request: QueryConnectionConsensusStateRequest): Promise { + return queryService.connectionConsensusState(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/query.ts b/packages/codegen/src/ibc/core/connection/v1/query.ts new file mode 100644 index 00000000..857da006 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/query.ts @@ -0,0 +1,805 @@ +import { PageRequest, PageRequestSDKType, PageResponse, PageResponseSDKType } from "../../../../cosmos/base/query/v1beta1/pagination"; +import { ConnectionEnd, ConnectionEndSDKType, IdentifiedConnection, 
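
// Illustrative sketch (not generated output): the Stargate-style extension above attaches to an
// existing "@cosmjs/stargate" QueryClient. A hedged example, assuming `base` is a QueryClient already
// wired to a Tendermint RPC endpoint and the connection id is hypothetical:
//
//   const ibcConnectionQuery = createRpcQueryExtension(base);
//   const { connection } = await ibcConnectionQuery.connection({ connectionId: "connection-0" });
//   const all = await ibcConnectionQuery.connections();   // request defaults to { pagination: undefined }
//
// Each extension method simply delegates to QueryClientImpl, which encodes the request message and
// decodes the protobuf response returned by rpc.request.
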
IdentifiedConnectionSDKType } from "./connection"; +import { Height, HeightSDKType, IdentifiedClientState, IdentifiedClientStateSDKType } from "../../client/v1/client"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../../../helpers"; +/** + * QueryConnectionRequest is the request type for the Query/Connection RPC + * method + */ + +export interface QueryConnectionRequest { + /** connection unique identifier */ + connectionId: string; +} +/** + * QueryConnectionRequest is the request type for the Query/Connection RPC + * method + */ + +export interface QueryConnectionRequestSDKType { + connection_id: string; +} +/** + * QueryConnectionResponse is the response type for the Query/Connection RPC + * method. Besides the connection end, it includes a proof and the height from + * which the proof was retrieved. + */ + +export interface QueryConnectionResponse { + /** connection associated with the request identifier */ + connection?: ConnectionEnd; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryConnectionResponse is the response type for the Query/Connection RPC + * method. Besides the connection end, it includes a proof and the height from + * which the proof was retrieved. + */ + +export interface QueryConnectionResponseSDKType { + connection?: ConnectionEndSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionsRequest is the request type for the Query/Connections RPC + * method + */ + +export interface QueryConnectionsRequest { + pagination?: PageRequest; +} +/** + * QueryConnectionsRequest is the request type for the Query/Connections RPC + * method + */ + +export interface QueryConnectionsRequestSDKType { + pagination?: PageRequestSDKType; +} +/** + * QueryConnectionsResponse is the response type for the Query/Connections RPC + * method. + */ + +export interface QueryConnectionsResponse { + /** list of stored connections of the chain. */ + connections: IdentifiedConnection[]; + /** pagination response */ + + pagination?: PageResponse; + /** query block height */ + + height?: Height; +} +/** + * QueryConnectionsResponse is the response type for the Query/Connections RPC + * method. + */ + +export interface QueryConnectionsResponseSDKType { + connections: IdentifiedConnectionSDKType[]; + pagination?: PageResponseSDKType; + height?: HeightSDKType; +} +/** + * QueryClientConnectionsRequest is the request type for the + * Query/ClientConnections RPC method + */ + +export interface QueryClientConnectionsRequest { + /** client identifier associated with a connection */ + clientId: string; +} +/** + * QueryClientConnectionsRequest is the request type for the + * Query/ClientConnections RPC method + */ + +export interface QueryClientConnectionsRequestSDKType { + client_id: string; +} +/** + * QueryClientConnectionsResponse is the response type for the + * Query/ClientConnections RPC method + */ + +export interface QueryClientConnectionsResponse { + /** slice of all the connection paths associated with a client. 
*/ + connectionPaths: string[]; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was generated */ + + proofHeight?: Height; +} +/** + * QueryClientConnectionsResponse is the response type for the + * Query/ClientConnections RPC method + */ + +export interface QueryClientConnectionsResponseSDKType { + connection_paths: string[]; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionClientStateRequest is the request type for the + * Query/ConnectionClientState RPC method + */ + +export interface QueryConnectionClientStateRequest { + /** connection identifier */ + connectionId: string; +} +/** + * QueryConnectionClientStateRequest is the request type for the + * Query/ConnectionClientState RPC method + */ + +export interface QueryConnectionClientStateRequestSDKType { + connection_id: string; +} +/** + * QueryConnectionClientStateResponse is the response type for the + * Query/ConnectionClientState RPC method + */ + +export interface QueryConnectionClientStateResponse { + /** client state associated with the channel */ + identifiedClientState?: IdentifiedClientState; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryConnectionClientStateResponse is the response type for the + * Query/ConnectionClientState RPC method + */ + +export interface QueryConnectionClientStateResponseSDKType { + identified_client_state?: IdentifiedClientStateSDKType; + proof: Uint8Array; + proof_height?: HeightSDKType; +} +/** + * QueryConnectionConsensusStateRequest is the request type for the + * Query/ConnectionConsensusState RPC method + */ + +export interface QueryConnectionConsensusStateRequest { + /** connection identifier */ + connectionId: string; + revisionNumber: Long; + revisionHeight: Long; +} +/** + * QueryConnectionConsensusStateRequest is the request type for the + * Query/ConnectionConsensusState RPC method + */ + +export interface QueryConnectionConsensusStateRequestSDKType { + connection_id: string; + revision_number: Long; + revision_height: Long; +} +/** + * QueryConnectionConsensusStateResponse is the response type for the + * Query/ConnectionConsensusState RPC method + */ + +export interface QueryConnectionConsensusStateResponse { + /** consensus state associated with the channel */ + consensusState?: Any; + /** client ID associated with the consensus state */ + + clientId: string; + /** merkle proof of existence */ + + proof: Uint8Array; + /** height at which the proof was retrieved */ + + proofHeight?: Height; +} +/** + * QueryConnectionConsensusStateResponse is the response type for the + * Query/ConnectionConsensusState RPC method + */ + +export interface QueryConnectionConsensusStateResponseSDKType { + consensus_state?: AnySDKType; + client_id: string; + proof: Uint8Array; + proof_height?: HeightSDKType; +} + +function createBaseQueryConnectionRequest(): QueryConnectionRequest { + return { + connectionId: "" + }; +} + +export const QueryConnectionRequest = { + encode(message: QueryConnectionRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connectionId !== "") { + writer.uint32(10).string(message.connectionId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryConnectionRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionRequest { + const message = createBaseQueryConnectionRequest(); + message.connectionId = object.connectionId ?? ""; + return message; + } + +}; + +function createBaseQueryConnectionResponse(): QueryConnectionResponse { + return { + connection: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryConnectionResponse = { + encode(message: QueryConnectionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connection !== undefined) { + ConnectionEnd.encode(message.connection, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connection = ConnectionEnd.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionResponse { + const message = createBaseQueryConnectionResponse(); + message.connection = object.connection !== undefined && object.connection !== null ? ConnectionEnd.fromPartial(object.connection) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionsRequest(): QueryConnectionsRequest { + return { + pagination: undefined + }; +} + +export const QueryConnectionsRequest = { + encode(message: QueryConnectionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionsRequest { + const message = createBaseQueryConnectionsRequest(); + message.pagination = object.pagination !== undefined && object.pagination !== null ? 
PageRequest.fromPartial(object.pagination) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionsResponse(): QueryConnectionsResponse { + return { + connections: [], + pagination: undefined, + height: undefined + }; +} + +export const QueryConnectionsResponse = { + encode(message: QueryConnectionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.connections) { + IdentifiedConnection.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connections.push(IdentifiedConnection.decode(reader, reader.uint32())); + break; + + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + + case 3: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionsResponse { + const message = createBaseQueryConnectionsResponse(); + message.connections = object.connections?.map(e => IdentifiedConnection.fromPartial(e)) || []; + message.pagination = object.pagination !== undefined && object.pagination !== null ? PageResponse.fromPartial(object.pagination) : undefined; + message.height = object.height !== undefined && object.height !== null ? Height.fromPartial(object.height) : undefined; + return message; + } + +}; + +function createBaseQueryClientConnectionsRequest(): QueryClientConnectionsRequest { + return { + clientId: "" + }; +} + +export const QueryClientConnectionsRequest = { + encode(message: QueryClientConnectionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientConnectionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientConnectionsRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientConnectionsRequest { + const message = createBaseQueryClientConnectionsRequest(); + message.clientId = object.clientId ?? 
""; + return message; + } + +}; + +function createBaseQueryClientConnectionsResponse(): QueryClientConnectionsResponse { + return { + connectionPaths: [], + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryClientConnectionsResponse = { + encode(message: QueryClientConnectionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.connectionPaths) { + writer.uint32(10).string(v!); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryClientConnectionsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryClientConnectionsResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionPaths.push(reader.string()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryClientConnectionsResponse { + const message = createBaseQueryClientConnectionsResponse(); + message.connectionPaths = object.connectionPaths?.map(e => e) || []; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionClientStateRequest(): QueryConnectionClientStateRequest { + return { + connectionId: "" + }; +} + +export const QueryConnectionClientStateRequest = { + encode(message: QueryConnectionClientStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connectionId !== "") { + writer.uint32(10).string(message.connectionId); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionClientStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionClientStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionId = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionClientStateRequest { + const message = createBaseQueryConnectionClientStateRequest(); + message.connectionId = object.connectionId ?? 
""; + return message; + } + +}; + +function createBaseQueryConnectionClientStateResponse(): QueryConnectionClientStateResponse { + return { + identifiedClientState: undefined, + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryConnectionClientStateResponse = { + encode(message: QueryConnectionClientStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.identifiedClientState !== undefined) { + IdentifiedClientState.encode(message.identifiedClientState, writer.uint32(10).fork()).ldelim(); + } + + if (message.proof.length !== 0) { + writer.uint32(18).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionClientStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionClientStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.identifiedClientState = IdentifiedClientState.decode(reader, reader.uint32()); + break; + + case 2: + message.proof = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionClientStateResponse { + const message = createBaseQueryConnectionClientStateResponse(); + message.identifiedClientState = object.identifiedClientState !== undefined && object.identifiedClientState !== null ? IdentifiedClientState.fromPartial(object.identifiedClientState) : undefined; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; + +function createBaseQueryConnectionConsensusStateRequest(): QueryConnectionConsensusStateRequest { + return { + connectionId: "", + revisionNumber: Long.UZERO, + revisionHeight: Long.UZERO + }; +} + +export const QueryConnectionConsensusStateRequest = { + encode(message: QueryConnectionConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connectionId !== "") { + writer.uint32(10).string(message.connectionId); + } + + if (!message.revisionNumber.isZero()) { + writer.uint32(16).uint64(message.revisionNumber); + } + + if (!message.revisionHeight.isZero()) { + writer.uint32(24).uint64(message.revisionHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionConsensusStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryConnectionConsensusStateRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionId = reader.string(); + break; + + case 2: + message.revisionNumber = (reader.uint64() as Long); + break; + + case 3: + message.revisionHeight = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionConsensusStateRequest { + const message = createBaseQueryConnectionConsensusStateRequest(); + message.connectionId = object.connectionId ?? ""; + message.revisionNumber = object.revisionNumber !== undefined && object.revisionNumber !== null ? Long.fromValue(object.revisionNumber) : Long.UZERO; + message.revisionHeight = object.revisionHeight !== undefined && object.revisionHeight !== null ? Long.fromValue(object.revisionHeight) : Long.UZERO; + return message; + } + +}; + +function createBaseQueryConnectionConsensusStateResponse(): QueryConnectionConsensusStateResponse { + return { + consensusState: undefined, + clientId: "", + proof: new Uint8Array(), + proofHeight: undefined + }; +} + +export const QueryConnectionConsensusStateResponse = { + encode(message: QueryConnectionConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(10).fork()).ldelim(); + } + + if (message.clientId !== "") { + writer.uint32(18).string(message.clientId); + } + + if (message.proof.length !== 0) { + writer.uint32(26).bytes(message.proof); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryConnectionConsensusStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryConnectionConsensusStateResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.clientId = reader.string(); + break; + + case 3: + message.proof = reader.bytes(); + break; + + case 4: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryConnectionConsensusStateResponse { + const message = createBaseQueryConnectionConsensusStateResponse(); + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + message.clientId = object.clientId ?? ""; + message.proof = object.proof ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? 
Height.fromPartial(object.proofHeight) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/tx.rpc.msg.ts b/packages/codegen/src/ibc/core/connection/v1/tx.rpc.msg.ts new file mode 100644 index 00000000..bfda3f8c --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/tx.rpc.msg.ts @@ -0,0 +1,57 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { MsgConnectionOpenInit, MsgConnectionOpenInitResponse, MsgConnectionOpenTry, MsgConnectionOpenTryResponse, MsgConnectionOpenAck, MsgConnectionOpenAckResponse, MsgConnectionOpenConfirm, MsgConnectionOpenConfirmResponse } from "./tx"; +/** Msg defines the ibc/connection Msg service. */ + +export interface Msg { + /** ConnectionOpenInit defines a rpc handler method for MsgConnectionOpenInit. */ + connectionOpenInit(request: MsgConnectionOpenInit): Promise; + /** ConnectionOpenTry defines a rpc handler method for MsgConnectionOpenTry. */ + + connectionOpenTry(request: MsgConnectionOpenTry): Promise; + /** ConnectionOpenAck defines a rpc handler method for MsgConnectionOpenAck. */ + + connectionOpenAck(request: MsgConnectionOpenAck): Promise; + /** + * ConnectionOpenConfirm defines a rpc handler method for + * MsgConnectionOpenConfirm. + */ + + connectionOpenConfirm(request: MsgConnectionOpenConfirm): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.connectionOpenInit = this.connectionOpenInit.bind(this); + this.connectionOpenTry = this.connectionOpenTry.bind(this); + this.connectionOpenAck = this.connectionOpenAck.bind(this); + this.connectionOpenConfirm = this.connectionOpenConfirm.bind(this); + } + + connectionOpenInit(request: MsgConnectionOpenInit): Promise { + const data = MsgConnectionOpenInit.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Msg", "ConnectionOpenInit", data); + return promise.then(data => MsgConnectionOpenInitResponse.decode(new _m0.Reader(data))); + } + + connectionOpenTry(request: MsgConnectionOpenTry): Promise { + const data = MsgConnectionOpenTry.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Msg", "ConnectionOpenTry", data); + return promise.then(data => MsgConnectionOpenTryResponse.decode(new _m0.Reader(data))); + } + + connectionOpenAck(request: MsgConnectionOpenAck): Promise { + const data = MsgConnectionOpenAck.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Msg", "ConnectionOpenAck", data); + return promise.then(data => MsgConnectionOpenAckResponse.decode(new _m0.Reader(data))); + } + + connectionOpenConfirm(request: MsgConnectionOpenConfirm): Promise { + const data = MsgConnectionOpenConfirm.encode(request).finish(); + const promise = this.rpc.request("ibc.core.connection.v1.Msg", "ConnectionOpenConfirm", data); + return promise.then(data => MsgConnectionOpenConfirmResponse.decode(new _m0.Reader(data))); + } + +} \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/connection/v1/tx.ts b/packages/codegen/src/ibc/core/connection/v1/tx.ts new file mode 100644 index 00000000..de5aea04 --- /dev/null +++ b/packages/codegen/src/ibc/core/connection/v1/tx.ts @@ -0,0 +1,770 @@ +import { Counterparty, CounterpartySDKType, Version, VersionSDKType } from "./connection"; +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { Height, HeightSDKType } from 
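
// Illustrative sketch (not generated output): a hedged example of driving the MsgClientImpl defined in
// tx.rpc.msg.ts above. `rpc` is assumed to be any object satisfying the Rpc interface from helpers,
// i.e. request(service, method, data) => Promise of response bytes; all field values are hypothetical:
//
//   const msgs = new MsgClientImpl(rpc);
//   const request = MsgConnectionOpenInit.fromPartial({ clientId: "07-tendermint-0", signer: "cosmos1..." });
//   const response = await msgs.connectionOpenInit(request);   // decoded MsgConnectionOpenInitResponse (empty message)
//
// The message codecs referenced here (MsgConnectionOpenInit and friends) are defined in tx.ts, which
// begins immediately below.
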
"../../client/v1/client"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * MsgConnectionOpenInit defines the msg sent by an account on Chain A to + * initialize a connection with Chain B. + */ + +export interface MsgConnectionOpenInit { + clientId: string; + counterparty?: Counterparty; + version?: Version; + delayPeriod: Long; + signer: string; +} +/** + * MsgConnectionOpenInit defines the msg sent by an account on Chain A to + * initialize a connection with Chain B. + */ + +export interface MsgConnectionOpenInitSDKType { + client_id: string; + counterparty?: CounterpartySDKType; + version?: VersionSDKType; + delay_period: Long; + signer: string; +} +/** + * MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response + * type. + */ + +export interface MsgConnectionOpenInitResponse {} +/** + * MsgConnectionOpenInitResponse defines the Msg/ConnectionOpenInit response + * type. + */ + +export interface MsgConnectionOpenInitResponseSDKType {} +/** + * MsgConnectionOpenTry defines a msg sent by a Relayer to try to open a + * connection on Chain B. + */ + +export interface MsgConnectionOpenTry { + clientId: string; + /** + * in the case of crossing hello's, when both chains call OpenInit, we need + * the connection identifier of the previous connection in state INIT + */ + + previousConnectionId: string; + clientState?: Any; + counterparty?: Counterparty; + delayPeriod: Long; + counterpartyVersions: Version[]; + proofHeight?: Height; + /** + * proof of the initialization the connection on Chain A: `UNITIALIZED -> + * INIT` + */ + + proofInit: Uint8Array; + /** proof of client state included in message */ + + proofClient: Uint8Array; + /** proof of client consensus state */ + + proofConsensus: Uint8Array; + consensusHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenTry defines a msg sent by a Relayer to try to open a + * connection on Chain B. + */ + +export interface MsgConnectionOpenTrySDKType { + client_id: string; + previous_connection_id: string; + client_state?: AnySDKType; + counterparty?: CounterpartySDKType; + delay_period: Long; + counterparty_versions: VersionSDKType[]; + proof_height?: HeightSDKType; + proof_init: Uint8Array; + proof_client: Uint8Array; + proof_consensus: Uint8Array; + consensus_height?: HeightSDKType; + signer: string; +} +/** MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type. */ + +export interface MsgConnectionOpenTryResponse {} +/** MsgConnectionOpenTryResponse defines the Msg/ConnectionOpenTry response type. */ + +export interface MsgConnectionOpenTryResponseSDKType {} +/** + * MsgConnectionOpenAck defines a msg sent by a Relayer to Chain A to + * acknowledge the change of connection state to TRYOPEN on Chain B. + */ + +export interface MsgConnectionOpenAck { + connectionId: string; + counterpartyConnectionId: string; + version?: Version; + clientState?: Any; + proofHeight?: Height; + /** + * proof of the initialization the connection on Chain B: `UNITIALIZED -> + * TRYOPEN` + */ + + proofTry: Uint8Array; + /** proof of client state included in message */ + + proofClient: Uint8Array; + /** proof of client consensus state */ + + proofConsensus: Uint8Array; + consensusHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenAck defines a msg sent by a Relayer to Chain A to + * acknowledge the change of connection state to TRYOPEN on Chain B. 
+ */ + +export interface MsgConnectionOpenAckSDKType { + connection_id: string; + counterparty_connection_id: string; + version?: VersionSDKType; + client_state?: AnySDKType; + proof_height?: HeightSDKType; + proof_try: Uint8Array; + proof_client: Uint8Array; + proof_consensus: Uint8Array; + consensus_height?: HeightSDKType; + signer: string; +} +/** MsgConnectionOpenAckResponse defines the Msg/ConnectionOpenAck response type. */ + +export interface MsgConnectionOpenAckResponse {} +/** MsgConnectionOpenAckResponse defines the Msg/ConnectionOpenAck response type. */ + +export interface MsgConnectionOpenAckResponseSDKType {} +/** + * MsgConnectionOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of connection state to OPEN on Chain A. + */ + +export interface MsgConnectionOpenConfirm { + connectionId: string; + /** proof for the change of the connection state on Chain A: `INIT -> OPEN` */ + + proofAck: Uint8Array; + proofHeight?: Height; + signer: string; +} +/** + * MsgConnectionOpenConfirm defines a msg sent by a Relayer to Chain B to + * acknowledge the change of connection state to OPEN on Chain A. + */ + +export interface MsgConnectionOpenConfirmSDKType { + connection_id: string; + proof_ack: Uint8Array; + proof_height?: HeightSDKType; + signer: string; +} +/** + * MsgConnectionOpenConfirmResponse defines the Msg/ConnectionOpenConfirm + * response type. + */ + +export interface MsgConnectionOpenConfirmResponse {} +/** + * MsgConnectionOpenConfirmResponse defines the Msg/ConnectionOpenConfirm + * response type. + */ + +export interface MsgConnectionOpenConfirmResponseSDKType {} + +function createBaseMsgConnectionOpenInit(): MsgConnectionOpenInit { + return { + clientId: "", + counterparty: undefined, + version: undefined, + delayPeriod: Long.UZERO, + signer: "" + }; +} + +export const MsgConnectionOpenInit = { + encode(message: MsgConnectionOpenInit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(18).fork()).ldelim(); + } + + if (message.version !== undefined) { + Version.encode(message.version, writer.uint32(26).fork()).ldelim(); + } + + if (!message.delayPeriod.isZero()) { + writer.uint32(32).uint64(message.delayPeriod); + } + + if (message.signer !== "") { + writer.uint32(42).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenInit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenInit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 3: + message.version = Version.decode(reader, reader.uint32()); + break; + + case 4: + message.delayPeriod = (reader.uint64() as Long); + break; + + case 5: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgConnectionOpenInit { + const message = createBaseMsgConnectionOpenInit(); + message.clientId = object.clientId ?? 
""; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.version = object.version !== undefined && object.version !== null ? Version.fromPartial(object.version) : undefined; + message.delayPeriod = object.delayPeriod !== undefined && object.delayPeriod !== null ? Long.fromValue(object.delayPeriod) : Long.UZERO; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgConnectionOpenInitResponse(): MsgConnectionOpenInitResponse { + return {}; +} + +export const MsgConnectionOpenInitResponse = { + encode(_: MsgConnectionOpenInitResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenInitResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenInitResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgConnectionOpenInitResponse { + const message = createBaseMsgConnectionOpenInitResponse(); + return message; + } + +}; + +function createBaseMsgConnectionOpenTry(): MsgConnectionOpenTry { + return { + clientId: "", + previousConnectionId: "", + clientState: undefined, + counterparty: undefined, + delayPeriod: Long.UZERO, + counterpartyVersions: [], + proofHeight: undefined, + proofInit: new Uint8Array(), + proofClient: new Uint8Array(), + proofConsensus: new Uint8Array(), + consensusHeight: undefined, + signer: "" + }; +} + +export const MsgConnectionOpenTry = { + encode(message: MsgConnectionOpenTry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.previousConnectionId !== "") { + writer.uint32(18).string(message.previousConnectionId); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(26).fork()).ldelim(); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(34).fork()).ldelim(); + } + + if (!message.delayPeriod.isZero()) { + writer.uint32(40).uint64(message.delayPeriod); + } + + for (const v of message.counterpartyVersions) { + Version.encode(v!, writer.uint32(50).fork()).ldelim(); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(58).fork()).ldelim(); + } + + if (message.proofInit.length !== 0) { + writer.uint32(66).bytes(message.proofInit); + } + + if (message.proofClient.length !== 0) { + writer.uint32(74).bytes(message.proofClient); + } + + if (message.proofConsensus.length !== 0) { + writer.uint32(82).bytes(message.proofConsensus); + } + + if (message.consensusHeight !== undefined) { + Height.encode(message.consensusHeight, writer.uint32(90).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(98).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenTry { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenTry(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.previousConnectionId = reader.string(); + break; + + case 3: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + case 4: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 5: + message.delayPeriod = (reader.uint64() as Long); + break; + + case 6: + message.counterpartyVersions.push(Version.decode(reader, reader.uint32())); + break; + + case 7: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 8: + message.proofInit = reader.bytes(); + break; + + case 9: + message.proofClient = reader.bytes(); + break; + + case 10: + message.proofConsensus = reader.bytes(); + break; + + case 11: + message.consensusHeight = Height.decode(reader, reader.uint32()); + break; + + case 12: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgConnectionOpenTry { + const message = createBaseMsgConnectionOpenTry(); + message.clientId = object.clientId ?? ""; + message.previousConnectionId = object.previousConnectionId ?? ""; + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.delayPeriod = object.delayPeriod !== undefined && object.delayPeriod !== null ? Long.fromValue(object.delayPeriod) : Long.UZERO; + message.counterpartyVersions = object.counterpartyVersions?.map(e => Version.fromPartial(e)) || []; + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.proofInit = object.proofInit ?? new Uint8Array(); + message.proofClient = object.proofClient ?? new Uint8Array(); + message.proofConsensus = object.proofConsensus ?? new Uint8Array(); + message.consensusHeight = object.consensusHeight !== undefined && object.consensusHeight !== null ? Height.fromPartial(object.consensusHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgConnectionOpenTryResponse(): MsgConnectionOpenTryResponse { + return {}; +} + +export const MsgConnectionOpenTryResponse = { + encode(_: MsgConnectionOpenTryResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenTryResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenTryResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgConnectionOpenTryResponse { + const message = createBaseMsgConnectionOpenTryResponse(); + return message; + } + +}; + +function createBaseMsgConnectionOpenAck(): MsgConnectionOpenAck { + return { + connectionId: "", + counterpartyConnectionId: "", + version: undefined, + clientState: undefined, + proofHeight: undefined, + proofTry: new Uint8Array(), + proofClient: new Uint8Array(), + proofConsensus: new Uint8Array(), + consensusHeight: undefined, + signer: "" + }; +} + +export const MsgConnectionOpenAck = { + encode(message: MsgConnectionOpenAck, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connectionId !== "") { + writer.uint32(10).string(message.connectionId); + } + + if (message.counterpartyConnectionId !== "") { + writer.uint32(18).string(message.counterpartyConnectionId); + } + + if (message.version !== undefined) { + Version.encode(message.version, writer.uint32(26).fork()).ldelim(); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(34).fork()).ldelim(); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(42).fork()).ldelim(); + } + + if (message.proofTry.length !== 0) { + writer.uint32(50).bytes(message.proofTry); + } + + if (message.proofClient.length !== 0) { + writer.uint32(58).bytes(message.proofClient); + } + + if (message.proofConsensus.length !== 0) { + writer.uint32(66).bytes(message.proofConsensus); + } + + if (message.consensusHeight !== undefined) { + Height.encode(message.consensusHeight, writer.uint32(74).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(82).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenAck { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenAck(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionId = reader.string(); + break; + + case 2: + message.counterpartyConnectionId = reader.string(); + break; + + case 3: + message.version = Version.decode(reader, reader.uint32()); + break; + + case 4: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + case 5: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 6: + message.proofTry = reader.bytes(); + break; + + case 7: + message.proofClient = reader.bytes(); + break; + + case 8: + message.proofConsensus = reader.bytes(); + break; + + case 9: + message.consensusHeight = Height.decode(reader, reader.uint32()); + break; + + case 10: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgConnectionOpenAck { + const message = createBaseMsgConnectionOpenAck(); + message.connectionId = object.connectionId ?? ""; + message.counterpartyConnectionId = object.counterpartyConnectionId ?? ""; + message.version = object.version !== undefined && object.version !== null ? 
Version.fromPartial(object.version) : undefined; + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.proofTry = object.proofTry ?? new Uint8Array(); + message.proofClient = object.proofClient ?? new Uint8Array(); + message.proofConsensus = object.proofConsensus ?? new Uint8Array(); + message.consensusHeight = object.consensusHeight !== undefined && object.consensusHeight !== null ? Height.fromPartial(object.consensusHeight) : undefined; + message.signer = object.signer ?? ""; + return message; + } + +}; + +function createBaseMsgConnectionOpenAckResponse(): MsgConnectionOpenAckResponse { + return {}; +} + +export const MsgConnectionOpenAckResponse = { + encode(_: MsgConnectionOpenAckResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenAckResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenAckResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgConnectionOpenAckResponse { + const message = createBaseMsgConnectionOpenAckResponse(); + return message; + } + +}; + +function createBaseMsgConnectionOpenConfirm(): MsgConnectionOpenConfirm { + return { + connectionId: "", + proofAck: new Uint8Array(), + proofHeight: undefined, + signer: "" + }; +} + +export const MsgConnectionOpenConfirm = { + encode(message: MsgConnectionOpenConfirm, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.connectionId !== "") { + writer.uint32(10).string(message.connectionId); + } + + if (message.proofAck.length !== 0) { + writer.uint32(18).bytes(message.proofAck); + } + + if (message.proofHeight !== undefined) { + Height.encode(message.proofHeight, writer.uint32(26).fork()).ldelim(); + } + + if (message.signer !== "") { + writer.uint32(34).string(message.signer); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenConfirm { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenConfirm(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.connectionId = reader.string(); + break; + + case 2: + message.proofAck = reader.bytes(); + break; + + case 3: + message.proofHeight = Height.decode(reader, reader.uint32()); + break; + + case 4: + message.signer = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): MsgConnectionOpenConfirm { + const message = createBaseMsgConnectionOpenConfirm(); + message.connectionId = object.connectionId ?? ""; + message.proofAck = object.proofAck ?? new Uint8Array(); + message.proofHeight = object.proofHeight !== undefined && object.proofHeight !== null ? Height.fromPartial(object.proofHeight) : undefined; + message.signer = object.signer ?? 
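+ /*
+  * Usage sketch (illustrative, placeholder values): every generated codec in this file
+  * exposes the same encode/decode/fromPartial surface, so a round-trip looks like:
+  *
+  *   const bytes = MsgConnectionOpenConfirm.encode(
+  *     MsgConnectionOpenConfirm.fromPartial({ connectionId: "connection-0", signer: "cosmos1exampleaddress" })
+  *   ).finish();
+  *   const roundTripped = MsgConnectionOpenConfirm.decode(bytes);
+  */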
""; + return message; + } + +}; + +function createBaseMsgConnectionOpenConfirmResponse(): MsgConnectionOpenConfirmResponse { + return {}; +} + +export const MsgConnectionOpenConfirmResponse = { + encode(_: MsgConnectionOpenConfirmResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgConnectionOpenConfirmResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgConnectionOpenConfirmResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): MsgConnectionOpenConfirmResponse { + const message = createBaseMsgConnectionOpenConfirmResponse(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/port/v1/query.rpc.Query.ts b/packages/codegen/src/ibc/core/port/v1/query.rpc.Query.ts new file mode 100644 index 00000000..df3c124c --- /dev/null +++ b/packages/codegen/src/ibc/core/port/v1/query.rpc.Query.ts @@ -0,0 +1,35 @@ +import { Rpc } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +import { QueryClient, createProtobufRpcClient } from "@cosmjs/stargate"; +import { QueryAppVersionRequest, QueryAppVersionResponse } from "./query"; +/** Query defines the gRPC querier service */ + +export interface Query { + /** AppVersion queries an IBC Port and determines the appropriate application version to be used */ + appVersion(request: QueryAppVersionRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + + constructor(rpc: Rpc) { + this.rpc = rpc; + this.appVersion = this.appVersion.bind(this); + } + + appVersion(request: QueryAppVersionRequest): Promise { + const data = QueryAppVersionRequest.encode(request).finish(); + const promise = this.rpc.request("ibc.core.port.v1.Query", "AppVersion", data); + return promise.then(data => QueryAppVersionResponse.decode(new _m0.Reader(data))); + } + +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + appVersion(request: QueryAppVersionRequest): Promise { + return queryService.appVersion(request); + } + + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/port/v1/query.ts b/packages/codegen/src/ibc/core/port/v1/query.ts new file mode 100644 index 00000000..3f3a0e4a --- /dev/null +++ b/packages/codegen/src/ibc/core/port/v1/query.ts @@ -0,0 +1,185 @@ +import { Order, Counterparty, CounterpartySDKType } from "../../channel/v1/channel"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** QueryAppVersionRequest is the request type for the Query/AppVersion RPC method */ + +export interface QueryAppVersionRequest { + /** port unique identifier */ + portId: string; + /** connection unique identifier */ + + connectionId: string; + /** whether the channel is ordered or unordered */ + + ordering: Order; + /** counterparty channel end */ + + counterparty?: Counterparty; + /** proposed version */ + + proposedVersion: string; +} +/** QueryAppVersionRequest is the request type for the Query/AppVersion RPC method */ + +export interface QueryAppVersionRequestSDKType { + port_id: string; + connection_id: 
string; + ordering: Order; + counterparty?: CounterpartySDKType; + proposed_version: string; +} +/** QueryAppVersionResponse is the response type for the Query/AppVersion RPC method. */ + +export interface QueryAppVersionResponse { + /** port id associated with the request identifiers */ + portId: string; + /** supported app version */ + + version: string; +} +/** QueryAppVersionResponse is the response type for the Query/AppVersion RPC method. */ + +export interface QueryAppVersionResponseSDKType { + port_id: string; + version: string; +} + +function createBaseQueryAppVersionRequest(): QueryAppVersionRequest { + return { + portId: "", + connectionId: "", + ordering: 0, + counterparty: undefined, + proposedVersion: "" + }; +} + +export const QueryAppVersionRequest = { + encode(message: QueryAppVersionRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.connectionId !== "") { + writer.uint32(18).string(message.connectionId); + } + + if (message.ordering !== 0) { + writer.uint32(24).int32(message.ordering); + } + + if (message.counterparty !== undefined) { + Counterparty.encode(message.counterparty, writer.uint32(34).fork()).ldelim(); + } + + if (message.proposedVersion !== "") { + writer.uint32(42).string(message.proposedVersion); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppVersionRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAppVersionRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.connectionId = reader.string(); + break; + + case 3: + message.ordering = (reader.int32() as any); + break; + + case 4: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + + case 5: + message.proposedVersion = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAppVersionRequest { + const message = createBaseQueryAppVersionRequest(); + message.portId = object.portId ?? ""; + message.connectionId = object.connectionId ?? ""; + message.ordering = object.ordering ?? 0; + message.counterparty = object.counterparty !== undefined && object.counterparty !== null ? Counterparty.fromPartial(object.counterparty) : undefined; + message.proposedVersion = object.proposedVersion ?? ""; + return message; + } + +}; + +function createBaseQueryAppVersionResponse(): QueryAppVersionResponse { + return { + portId: "", + version: "" + }; +} + +export const QueryAppVersionResponse = { + encode(message: QueryAppVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.portId !== "") { + writer.uint32(10).string(message.portId); + } + + if (message.version !== "") { + writer.uint32(18).string(message.version); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppVersionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
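+ /*
+  * Usage sketch: the companion query.rpc.Query.ts module wires these codecs into an RPC
+  * client. Assuming a connected @cosmjs/stargate QueryClient named `base`, and with the
+  * Order member name taken from the generated ../../channel/v1/channel module, a call
+  * could look like (all values are placeholders):
+  *
+  *   const { appVersion } = createRpcQueryExtension(base);
+  *   const res = await appVersion({
+  *     portId: "transfer",
+  *     connectionId: "connection-0",
+  *     ordering: Order.ORDER_UNORDERED,
+  *     proposedVersion: "ics20-1"
+  *   });
+  */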
reader.len : reader.pos + length; + const message = createBaseQueryAppVersionResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + + case 2: + message.version = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): QueryAppVersionResponse { + const message = createBaseQueryAppVersionResponse(); + message.portId = object.portId ?? ""; + message.version = object.version ?? ""; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/core/types/v1/genesis.ts b/packages/codegen/src/ibc/core/types/v1/genesis.ts new file mode 100644 index 00000000..8e01c49f --- /dev/null +++ b/packages/codegen/src/ibc/core/types/v1/genesis.ts @@ -0,0 +1,92 @@ +import { GenesisState as GenesisState1 } from "../../client/v1/genesis"; +import { GenesisStateSDKType as GenesisState1SDKType } from "../../client/v1/genesis"; +import { GenesisState as GenesisState2 } from "../../connection/v1/genesis"; +import { GenesisStateSDKType as GenesisState2SDKType } from "../../connection/v1/genesis"; +import { GenesisState as GenesisState3 } from "../../channel/v1/genesis"; +import { GenesisStateSDKType as GenesisState3SDKType } from "../../channel/v1/genesis"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** GenesisState defines the ibc module's genesis state. */ + +export interface GenesisState { + /** ICS002 - Clients genesis state */ + clientGenesis?: GenesisState1; + /** ICS003 - Connections genesis state */ + + connectionGenesis?: GenesisState2; + /** ICS004 - Channel genesis state */ + + channelGenesis?: GenesisState3; +} +/** GenesisState defines the ibc module's genesis state. */ + +export interface GenesisStateSDKType { + client_genesis?: GenesisState1SDKType; + connection_genesis?: GenesisState2SDKType; + channel_genesis?: GenesisState3SDKType; +} + +function createBaseGenesisState(): GenesisState { + return { + clientGenesis: undefined, + connectionGenesis: undefined, + channelGenesis: undefined + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientGenesis !== undefined) { + GenesisState1.encode(message.clientGenesis, writer.uint32(10).fork()).ldelim(); + } + + if (message.connectionGenesis !== undefined) { + GenesisState2.encode(message.connectionGenesis, writer.uint32(18).fork()).ldelim(); + } + + if (message.channelGenesis !== undefined) { + GenesisState3.encode(message.channelGenesis, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientGenesis = GenesisState1.decode(reader, reader.uint32()); + break; + + case 2: + message.connectionGenesis = GenesisState2.decode(reader, reader.uint32()); + break; + + case 3: + message.channelGenesis = GenesisState3.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): GenesisState { + const message = createBaseGenesisState(); + message.clientGenesis = object.clientGenesis !== undefined && object.clientGenesis !== null ? GenesisState1.fromPartial(object.clientGenesis) : undefined; + message.connectionGenesis = object.connectionGenesis !== undefined && object.connectionGenesis !== null ? GenesisState2.fromPartial(object.connectionGenesis) : undefined; + message.channelGenesis = object.channelGenesis !== undefined && object.channelGenesis !== null ? GenesisState3.fromPartial(object.channelGenesis) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/lcd.ts b/packages/codegen/src/ibc/lcd.ts new file mode 100644 index 00000000..100c9f9c --- /dev/null +++ b/packages/codegen/src/ibc/lcd.ts @@ -0,0 +1,125 @@ +import { LCDClient } from "@osmonauts/lcd"; +export const createLCDClient = async ({ + restEndpoint +}: { + restEndpoint: string; +}) => { + const requestClient = new LCDClient({ + restEndpoint + }); + return { + cosmos: { + auth: { + v1beta1: new (await import("../cosmos/auth/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + authz: { + v1beta1: new (await import("../cosmos/authz/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + bank: { + v1beta1: new (await import("../cosmos/bank/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + base: { + tendermint: { + v1beta1: new (await import("../cosmos/base/tendermint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + distribution: { + v1beta1: new (await import("../cosmos/distribution/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + evidence: { + v1beta1: new (await import("../cosmos/evidence/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + feegrant: { + v1beta1: new (await import("../cosmos/feegrant/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + gov: { + v1: new (await import("../cosmos/gov/v1/query.lcd")).LCDQueryClient({ + requestClient + }), + v1beta1: new (await import("../cosmos/gov/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + group: { + v1: new (await import("../cosmos/group/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + mint: { + v1beta1: new (await import("../cosmos/mint/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + nft: { + v1beta1: new (await import("../cosmos/nft/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + params: { + v1beta1: new (await import("../cosmos/params/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + slashing: { + v1beta1: new (await import("../cosmos/slashing/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + staking: { + v1beta1: new (await import("../cosmos/staking/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + tx: { + v1beta1: new (await import("../cosmos/tx/v1beta1/service.lcd")).LCDQueryClient({ + requestClient + }) + }, + upgrade: 
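+ /*
+  * Usage sketch: createLCDClient lazily imports each LCD query client, so a caller only
+  * loads the modules it actually touches (endpoint below is a placeholder):
+  *
+  *   const client = await createLCDClient({ restEndpoint: "https://rest.example.org" });
+  *   // namespaces mirror the proto packages, e.g. client.ibc.core.channel.v1
+  *   //                                            client.cosmos.bank.v1beta1
+  */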
{ + v1beta1: new (await import("../cosmos/upgrade/v1beta1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + ibc: { + applications: { + transfer: { + v1: new (await import("./applications/transfer/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + }, + core: { + channel: { + v1: new (await import("./core/channel/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + client: { + v1: new (await import("./core/client/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + }, + connection: { + v1: new (await import("./core/connection/v1/query.lcd")).LCDQueryClient({ + requestClient + }) + } + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/lightclients/localhost/v1/localhost.ts b/packages/codegen/src/ibc/lightclients/localhost/v1/localhost.ts new file mode 100644 index 00000000..65e9e11d --- /dev/null +++ b/packages/codegen/src/ibc/lightclients/localhost/v1/localhost.ts @@ -0,0 +1,79 @@ +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../../../helpers"; +/** + * ClientState defines a loopback (localhost) client. It requires (read-only) + * access to keys outside the client prefix. + */ + +export interface ClientState { + /** self chain ID */ + chainId: string; + /** self latest block height */ + + height?: Height; +} +/** + * ClientState defines a loopback (localhost) client. It requires (read-only) + * access to keys outside the client prefix. + */ + +export interface ClientStateSDKType { + chain_id: string; + height?: HeightSDKType; +} + +function createBaseClientState(): ClientState { + return { + chainId: "", + height: undefined + }; +} + +export const ClientState = { + encode(message: ClientState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.chainId !== "") { + writer.uint32(10).string(message.chainId); + } + + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.chainId = reader.string(); + break; + + case 2: + message.height = Height.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientState { + const message = createBaseClientState(); + message.chainId = object.chainId ?? ""; + message.height = object.height !== undefined && object.height !== null ? 
Height.fromPartial(object.height) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/lightclients/solomachine/v1/solomachine.ts b/packages/codegen/src/ibc/lightclients/solomachine/v1/solomachine.ts new file mode 100644 index 00000000..1ba467ce --- /dev/null +++ b/packages/codegen/src/ibc/lightclients/solomachine/v1/solomachine.ts @@ -0,0 +1,1441 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { ConnectionEnd, ConnectionEndSDKType } from "../../../core/connection/v1/connection"; +import { Channel, ChannelSDKType } from "../../../core/channel/v1/channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * DataType defines the type of solo machine proof being created. This is done + * to preserve uniqueness of different data sign byte encodings. + */ + +export enum DataType { + /** DATA_TYPE_UNINITIALIZED_UNSPECIFIED - Default State */ + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0, + + /** DATA_TYPE_CLIENT_STATE - Data type for client state verification */ + DATA_TYPE_CLIENT_STATE = 1, + + /** DATA_TYPE_CONSENSUS_STATE - Data type for consensus state verification */ + DATA_TYPE_CONSENSUS_STATE = 2, + + /** DATA_TYPE_CONNECTION_STATE - Data type for connection state verification */ + DATA_TYPE_CONNECTION_STATE = 3, + + /** DATA_TYPE_CHANNEL_STATE - Data type for channel state verification */ + DATA_TYPE_CHANNEL_STATE = 4, + + /** DATA_TYPE_PACKET_COMMITMENT - Data type for packet commitment verification */ + DATA_TYPE_PACKET_COMMITMENT = 5, + + /** DATA_TYPE_PACKET_ACKNOWLEDGEMENT - Data type for packet acknowledgement verification */ + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6, + + /** DATA_TYPE_PACKET_RECEIPT_ABSENCE - Data type for packet receipt absence verification */ + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7, + + /** DATA_TYPE_NEXT_SEQUENCE_RECV - Data type for next sequence recv verification */ + DATA_TYPE_NEXT_SEQUENCE_RECV = 8, + + /** DATA_TYPE_HEADER - Data type for header verification */ + DATA_TYPE_HEADER = 9, + UNRECOGNIZED = -1, +} +export const DataTypeSDKType = DataType; +export function dataTypeFromJSON(object: any): DataType { + switch (object) { + case 0: + case "DATA_TYPE_UNINITIALIZED_UNSPECIFIED": + return DataType.DATA_TYPE_UNINITIALIZED_UNSPECIFIED; + + case 1: + case "DATA_TYPE_CLIENT_STATE": + return DataType.DATA_TYPE_CLIENT_STATE; + + case 2: + case "DATA_TYPE_CONSENSUS_STATE": + return DataType.DATA_TYPE_CONSENSUS_STATE; + + case 3: + case "DATA_TYPE_CONNECTION_STATE": + return DataType.DATA_TYPE_CONNECTION_STATE; + + case 4: + case "DATA_TYPE_CHANNEL_STATE": + return DataType.DATA_TYPE_CHANNEL_STATE; + + case 5: + case "DATA_TYPE_PACKET_COMMITMENT": + return DataType.DATA_TYPE_PACKET_COMMITMENT; + + case 6: + case "DATA_TYPE_PACKET_ACKNOWLEDGEMENT": + return DataType.DATA_TYPE_PACKET_ACKNOWLEDGEMENT; + + case 7: + case "DATA_TYPE_PACKET_RECEIPT_ABSENCE": + return DataType.DATA_TYPE_PACKET_RECEIPT_ABSENCE; + + case 8: + case "DATA_TYPE_NEXT_SEQUENCE_RECV": + return DataType.DATA_TYPE_NEXT_SEQUENCE_RECV; + + case 9: + case "DATA_TYPE_HEADER": + return DataType.DATA_TYPE_HEADER; + + case -1: + case "UNRECOGNIZED": + default: + return DataType.UNRECOGNIZED; + } +} +export function dataTypeToJSON(object: DataType): string { + switch (object) { + case DataType.DATA_TYPE_UNINITIALIZED_UNSPECIFIED: + return "DATA_TYPE_UNINITIALIZED_UNSPECIFIED"; + + case DataType.DATA_TYPE_CLIENT_STATE: + return "DATA_TYPE_CLIENT_STATE"; + + case 
DataType.DATA_TYPE_CONSENSUS_STATE: + return "DATA_TYPE_CONSENSUS_STATE"; + + case DataType.DATA_TYPE_CONNECTION_STATE: + return "DATA_TYPE_CONNECTION_STATE"; + + case DataType.DATA_TYPE_CHANNEL_STATE: + return "DATA_TYPE_CHANNEL_STATE"; + + case DataType.DATA_TYPE_PACKET_COMMITMENT: + return "DATA_TYPE_PACKET_COMMITMENT"; + + case DataType.DATA_TYPE_PACKET_ACKNOWLEDGEMENT: + return "DATA_TYPE_PACKET_ACKNOWLEDGEMENT"; + + case DataType.DATA_TYPE_PACKET_RECEIPT_ABSENCE: + return "DATA_TYPE_PACKET_RECEIPT_ABSENCE"; + + case DataType.DATA_TYPE_NEXT_SEQUENCE_RECV: + return "DATA_TYPE_NEXT_SEQUENCE_RECV"; + + case DataType.DATA_TYPE_HEADER: + return "DATA_TYPE_HEADER"; + + case DataType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ + +export interface ClientState { + /** latest sequence of the client state */ + sequence: Long; + /** frozen sequence of the solo machine */ + + frozenSequence: Long; + consensusState?: ConsensusState; + /** + * when set to true, will allow governance to update a solo machine client. + * The client will be unfrozen if it is frozen. + */ + + allowUpdateAfterProposal: boolean; +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ + +export interface ClientStateSDKType { + sequence: Long; + frozen_sequence: Long; + consensus_state?: ConsensusStateSDKType; + allow_update_after_proposal: boolean; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ + +export interface ConsensusState { + /** public key of the solo machine */ + publicKey?: Any; + /** + * diversifier allows the same public key to be re-used across different solo + * machine clients (potentially on different chains) without being considered + * misbehaviour. + */ + + diversifier: string; + timestamp: Long; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ + +export interface ConsensusStateSDKType { + public_key?: AnySDKType; + diversifier: string; + timestamp: Long; +} +/** Header defines a solo machine consensus header */ + +export interface Header { + /** sequence to update solo machine public key at */ + sequence: Long; + timestamp: Long; + signature: Uint8Array; + newPublicKey?: Any; + newDiversifier: string; +} +/** Header defines a solo machine consensus header */ + +export interface HeaderSDKType { + sequence: Long; + timestamp: Long; + signature: Uint8Array; + new_public_key?: AnySDKType; + new_diversifier: string; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ + +export interface Misbehaviour { + clientId: string; + sequence: Long; + signatureOne?: SignatureAndData; + signatureTwo?: SignatureAndData; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. 
+ */ + +export interface MisbehaviourSDKType { + client_id: string; + sequence: Long; + signature_one?: SignatureAndDataSDKType; + signature_two?: SignatureAndDataSDKType; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ + +export interface SignatureAndData { + signature: Uint8Array; + dataType: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ + +export interface SignatureAndDataSDKType { + signature: Uint8Array; + data_type: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ + +export interface TimestampedSignatureData { + signatureData: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ + +export interface TimestampedSignatureDataSDKType { + signature_data: Uint8Array; + timestamp: Long; +} +/** SignBytes defines the signed bytes used for signature verification. */ + +export interface SignBytes { + sequence: Long; + timestamp: Long; + diversifier: string; + /** type of the data used */ + + dataType: DataType; + /** marshaled data */ + + data: Uint8Array; +} +/** SignBytes defines the signed bytes used for signature verification. */ + +export interface SignBytesSDKType { + sequence: Long; + timestamp: Long; + diversifier: string; + data_type: DataType; + data: Uint8Array; +} +/** HeaderData returns the SignBytes data for update verification. */ + +export interface HeaderData { + /** header public key */ + newPubKey?: Any; + /** header diversifier */ + + newDiversifier: string; +} +/** HeaderData returns the SignBytes data for update verification. */ + +export interface HeaderDataSDKType { + new_pub_key?: AnySDKType; + new_diversifier: string; +} +/** ClientStateData returns the SignBytes data for client state verification. */ + +export interface ClientStateData { + path: Uint8Array; + clientState?: Any; +} +/** ClientStateData returns the SignBytes data for client state verification. */ + +export interface ClientStateDataSDKType { + path: Uint8Array; + client_state?: AnySDKType; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ + +export interface ConsensusStateData { + path: Uint8Array; + consensusState?: Any; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ + +export interface ConsensusStateDataSDKType { + path: Uint8Array; + consensus_state?: AnySDKType; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ + +export interface ConnectionStateData { + path: Uint8Array; + connection?: ConnectionEnd; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ + +export interface ConnectionStateDataSDKType { + path: Uint8Array; + connection?: ConnectionEndSDKType; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ + +export interface ChannelStateData { + path: Uint8Array; + channel?: Channel; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ + +export interface ChannelStateDataSDKType { + path: Uint8Array; + channel?: ChannelSDKType; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. 
+ */ + +export interface PacketCommitmentData { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ + +export interface PacketCommitmentDataSDKType { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ + +export interface PacketAcknowledgementData { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ + +export interface PacketAcknowledgementDataSDKType { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ + +export interface PacketReceiptAbsenceData { + path: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ + +export interface PacketReceiptAbsenceDataSDKType { + path: Uint8Array; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ + +export interface NextSequenceRecvData { + path: Uint8Array; + nextSeqRecv: Long; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ + +export interface NextSequenceRecvDataSDKType { + path: Uint8Array; + next_seq_recv: Long; +} + +function createBaseClientState(): ClientState { + return { + sequence: Long.UZERO, + frozenSequence: Long.UZERO, + consensusState: undefined, + allowUpdateAfterProposal: false + }; +} + +export const ClientState = { + encode(message: ClientState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (!message.frozenSequence.isZero()) { + writer.uint32(16).uint64(message.frozenSequence); + } + + if (message.consensusState !== undefined) { + ConsensusState.encode(message.consensusState, writer.uint32(26).fork()).ldelim(); + } + + if (message.allowUpdateAfterProposal === true) { + writer.uint32(32).bool(message.allowUpdateAfterProposal); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.frozenSequence = (reader.uint64() as Long); + break; + + case 3: + message.consensusState = ConsensusState.decode(reader, reader.uint32()); + break; + + case 4: + message.allowUpdateAfterProposal = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientState { + const message = createBaseClientState(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.frozenSequence = object.frozenSequence !== undefined && object.frozenSequence !== null ? Long.fromValue(object.frozenSequence) : Long.UZERO; + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? 
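+ /*
+  * Sketch (placeholder values; `headerData` is an assumed HeaderData instance): per the
+  * SignBytes doc comment above, a solo machine signs the canonical SignBytes encoding,
+  * which can be produced with the SignBytes codec generated later in this file:
+  *
+  *   const toSign = SignBytes.encode(SignBytes.fromPartial({
+  *     sequence: Long.fromNumber(1),
+  *     timestamp: Long.fromNumber(1700000000),
+  *     diversifier: "diversifier-1",
+  *     dataType: DataType.DATA_TYPE_HEADER,
+  *     data: HeaderData.encode(headerData).finish()
+  *   })).finish();
+  */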
ConsensusState.fromPartial(object.consensusState) : undefined; + message.allowUpdateAfterProposal = object.allowUpdateAfterProposal ?? false; + return message; + } + +}; + +function createBaseConsensusState(): ConsensusState { + return { + publicKey: undefined, + diversifier: "", + timestamp: Long.UZERO + }; +} + +export const ConsensusState = { + encode(message: ConsensusState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.diversifier !== "") { + writer.uint32(18).string(message.diversifier); + } + + if (!message.timestamp.isZero()) { + writer.uint32(24).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.diversifier = reader.string(); + break; + + case 3: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusState { + const message = createBaseConsensusState(); + message.publicKey = object.publicKey !== undefined && object.publicKey !== null ? Any.fromPartial(object.publicKey) : undefined; + message.diversifier = object.diversifier ?? ""; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseHeader(): Header { + return { + sequence: Long.UZERO, + timestamp: Long.UZERO, + signature: new Uint8Array(), + newPublicKey: undefined, + newDiversifier: "" + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + if (message.signature.length !== 0) { + writer.uint32(26).bytes(message.signature); + } + + if (message.newPublicKey !== undefined) { + Any.encode(message.newPublicKey, writer.uint32(34).fork()).ldelim(); + } + + if (message.newDiversifier !== "") { + writer.uint32(42).string(message.newDiversifier); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + case 3: + message.signature = reader.bytes(); + break; + + case 4: + message.newPublicKey = Any.decode(reader, reader.uint32()); + break; + + case 5: + message.newDiversifier = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial
): Header { + const message = createBaseHeader(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + message.signature = object.signature ?? new Uint8Array(); + message.newPublicKey = object.newPublicKey !== undefined && object.newPublicKey !== null ? Any.fromPartial(object.newPublicKey) : undefined; + message.newDiversifier = object.newDiversifier ?? ""; + return message; + } + +}; + +function createBaseMisbehaviour(): Misbehaviour { + return { + clientId: "", + sequence: Long.UZERO, + signatureOne: undefined, + signatureTwo: undefined + }; +} + +export const Misbehaviour = { + encode(message: Misbehaviour, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (!message.sequence.isZero()) { + writer.uint32(16).uint64(message.sequence); + } + + if (message.signatureOne !== undefined) { + SignatureAndData.encode(message.signatureOne, writer.uint32(26).fork()).ldelim(); + } + + if (message.signatureTwo !== undefined) { + SignatureAndData.encode(message.signatureTwo, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMisbehaviour(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.sequence = (reader.uint64() as Long); + break; + + case 3: + message.signatureOne = SignatureAndData.decode(reader, reader.uint32()); + break; + + case 4: + message.signatureTwo = SignatureAndData.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Misbehaviour { + const message = createBaseMisbehaviour(); + message.clientId = object.clientId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.signatureOne = object.signatureOne !== undefined && object.signatureOne !== null ? SignatureAndData.fromPartial(object.signatureOne) : undefined; + message.signatureTwo = object.signatureTwo !== undefined && object.signatureTwo !== null ? SignatureAndData.fromPartial(object.signatureTwo) : undefined; + return message; + } + +}; + +function createBaseSignatureAndData(): SignatureAndData { + return { + signature: new Uint8Array(), + dataType: 0, + data: new Uint8Array(), + timestamp: Long.UZERO + }; +} + +export const SignatureAndData = { + encode(message: SignatureAndData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signature.length !== 0) { + writer.uint32(10).bytes(message.signature); + } + + if (message.dataType !== 0) { + writer.uint32(16).int32(message.dataType); + } + + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + + if (!message.timestamp.isZero()) { + writer.uint32(32).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureAndData { + const reader = input instanceof _m0.Reader ? 
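+ /*
+  * Decode-loop note: each protobuf tag varint packs (fieldNumber << 3) | wireType, so the
+  * switch on `tag >>> 3` dispatches on the field number while `tag & 7` is the wire type;
+  * unrecognized fields fall through to reader.skipType(tag & 7), keeping decoders tolerant
+  * of newer message versions.
+  */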
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureAndData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signature = reader.bytes(); + break; + + case 2: + message.dataType = (reader.int32() as any); + break; + + case 3: + message.data = reader.bytes(); + break; + + case 4: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureAndData { + const message = createBaseSignatureAndData(); + message.signature = object.signature ?? new Uint8Array(); + message.dataType = object.dataType ?? 0; + message.data = object.data ?? new Uint8Array(); + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseTimestampedSignatureData(): TimestampedSignatureData { + return { + signatureData: new Uint8Array(), + timestamp: Long.UZERO + }; +} + +export const TimestampedSignatureData = { + encode(message: TimestampedSignatureData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signatureData.length !== 0) { + writer.uint32(10).bytes(message.signatureData); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TimestampedSignatureData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestampedSignatureData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signatureData = reader.bytes(); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TimestampedSignatureData { + const message = createBaseTimestampedSignatureData(); + message.signatureData = object.signatureData ?? new Uint8Array(); + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseSignBytes(): SignBytes { + return { + sequence: Long.UZERO, + timestamp: Long.UZERO, + diversifier: "", + dataType: 0, + data: new Uint8Array() + }; +} + +export const SignBytes = { + encode(message: SignBytes, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + if (message.diversifier !== "") { + writer.uint32(26).string(message.diversifier); + } + + if (message.dataType !== 0) { + writer.uint32(32).int32(message.dataType); + } + + if (message.data.length !== 0) { + writer.uint32(42).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignBytes { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignBytes(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + case 3: + message.diversifier = reader.string(); + break; + + case 4: + message.dataType = (reader.int32() as any); + break; + + case 5: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignBytes { + const message = createBaseSignBytes(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + message.diversifier = object.diversifier ?? ""; + message.dataType = object.dataType ?? 0; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseHeaderData(): HeaderData { + return { + newPubKey: undefined, + newDiversifier: "" + }; +} + +export const HeaderData = { + encode(message: HeaderData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.newPubKey !== undefined) { + Any.encode(message.newPubKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.newDiversifier !== "") { + writer.uint32(18).string(message.newDiversifier); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HeaderData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeaderData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.newPubKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.newDiversifier = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): HeaderData { + const message = createBaseHeaderData(); + message.newPubKey = object.newPubKey !== undefined && object.newPubKey !== null ? Any.fromPartial(object.newPubKey) : undefined; + message.newDiversifier = object.newDiversifier ?? ""; + return message; + } + +}; + +function createBaseClientStateData(): ClientStateData { + return { + path: new Uint8Array(), + clientState: undefined + }; +} + +export const ClientStateData = { + encode(message: ClientStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientStateData { + const message = createBaseClientStateData(); + message.path = object.path ?? new Uint8Array(); + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + return message; + } + +}; + +function createBaseConsensusStateData(): ConsensusStateData { + return { + path: new Uint8Array(), + consensusState: undefined + }; +} + +export const ConsensusStateData = { + encode(message: ConsensusStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusStateData { + const message = createBaseConsensusStateData(); + message.path = object.path ?? new Uint8Array(); + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + return message; + } + +}; + +function createBaseConnectionStateData(): ConnectionStateData { + return { + path: new Uint8Array(), + connection: undefined + }; +} + +export const ConnectionStateData = { + encode(message: ConnectionStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.connection !== undefined) { + ConnectionEnd.encode(message.connection, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConnectionStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.connection = ConnectionEnd.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConnectionStateData { + const message = createBaseConnectionStateData(); + message.path = object.path ?? new Uint8Array(); + message.connection = object.connection !== undefined && object.connection !== null ? 
ConnectionEnd.fromPartial(object.connection) : undefined; + return message; + } + +}; + +function createBaseChannelStateData(): ChannelStateData { + return { + path: new Uint8Array(), + channel: undefined + }; +} + +export const ChannelStateData = { + encode(message: ChannelStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.channel !== undefined) { + Channel.encode(message.channel, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ChannelStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChannelStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.channel = Channel.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ChannelStateData { + const message = createBaseChannelStateData(); + message.path = object.path ?? new Uint8Array(); + message.channel = object.channel !== undefined && object.channel !== null ? Channel.fromPartial(object.channel) : undefined; + return message; + } + +}; + +function createBasePacketCommitmentData(): PacketCommitmentData { + return { + path: new Uint8Array(), + commitment: new Uint8Array() + }; +} + +export const PacketCommitmentData = { + encode(message: PacketCommitmentData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.commitment.length !== 0) { + writer.uint32(18).bytes(message.commitment); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketCommitmentData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketCommitmentData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.commitment = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketCommitmentData { + const message = createBasePacketCommitmentData(); + message.path = object.path ?? new Uint8Array(); + message.commitment = object.commitment ?? new Uint8Array(); + return message; + } + +}; + +function createBasePacketAcknowledgementData(): PacketAcknowledgementData { + return { + path: new Uint8Array(), + acknowledgement: new Uint8Array() + }; +} + +export const PacketAcknowledgementData = { + encode(message: PacketAcknowledgementData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.acknowledgement.length !== 0) { + writer.uint32(18).bytes(message.acknowledgement); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketAcknowledgementData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePacketAcknowledgementData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.acknowledgement = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketAcknowledgementData { + const message = createBasePacketAcknowledgementData(); + message.path = object.path ?? new Uint8Array(); + message.acknowledgement = object.acknowledgement ?? new Uint8Array(); + return message; + } + +}; + +function createBasePacketReceiptAbsenceData(): PacketReceiptAbsenceData { + return { + path: new Uint8Array() + }; +} + +export const PacketReceiptAbsenceData = { + encode(message: PacketReceiptAbsenceData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketReceiptAbsenceData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketReceiptAbsenceData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketReceiptAbsenceData { + const message = createBasePacketReceiptAbsenceData(); + message.path = object.path ?? new Uint8Array(); + return message; + } + +}; + +function createBaseNextSequenceRecvData(): NextSequenceRecvData { + return { + path: new Uint8Array(), + nextSeqRecv: Long.UZERO + }; +} + +export const NextSequenceRecvData = { + encode(message: NextSequenceRecvData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (!message.nextSeqRecv.isZero()) { + writer.uint32(16).uint64(message.nextSeqRecv); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NextSequenceRecvData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNextSequenceRecvData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.nextSeqRecv = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NextSequenceRecvData { + const message = createBaseNextSequenceRecvData(); + message.path = object.path ?? new Uint8Array(); + message.nextSeqRecv = object.nextSeqRecv !== undefined && object.nextSeqRecv !== null ? 
Long.fromValue(object.nextSeqRecv) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/lightclients/solomachine/v2/solomachine.ts b/packages/codegen/src/ibc/lightclients/solomachine/v2/solomachine.ts new file mode 100644 index 00000000..81fa2925 --- /dev/null +++ b/packages/codegen/src/ibc/lightclients/solomachine/v2/solomachine.ts @@ -0,0 +1,1441 @@ +import { Any, AnySDKType } from "../../../../google/protobuf/any"; +import { ConnectionEnd, ConnectionEndSDKType } from "../../../core/connection/v1/connection"; +import { Channel, ChannelSDKType } from "../../../core/channel/v1/channel"; +import { Long, DeepPartial } from "../../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * DataType defines the type of solo machine proof being created. This is done + * to preserve uniqueness of different data sign byte encodings. + */ + +export enum DataType { + /** DATA_TYPE_UNINITIALIZED_UNSPECIFIED - Default State */ + DATA_TYPE_UNINITIALIZED_UNSPECIFIED = 0, + + /** DATA_TYPE_CLIENT_STATE - Data type for client state verification */ + DATA_TYPE_CLIENT_STATE = 1, + + /** DATA_TYPE_CONSENSUS_STATE - Data type for consensus state verification */ + DATA_TYPE_CONSENSUS_STATE = 2, + + /** DATA_TYPE_CONNECTION_STATE - Data type for connection state verification */ + DATA_TYPE_CONNECTION_STATE = 3, + + /** DATA_TYPE_CHANNEL_STATE - Data type for channel state verification */ + DATA_TYPE_CHANNEL_STATE = 4, + + /** DATA_TYPE_PACKET_COMMITMENT - Data type for packet commitment verification */ + DATA_TYPE_PACKET_COMMITMENT = 5, + + /** DATA_TYPE_PACKET_ACKNOWLEDGEMENT - Data type for packet acknowledgement verification */ + DATA_TYPE_PACKET_ACKNOWLEDGEMENT = 6, + + /** DATA_TYPE_PACKET_RECEIPT_ABSENCE - Data type for packet receipt absence verification */ + DATA_TYPE_PACKET_RECEIPT_ABSENCE = 7, + + /** DATA_TYPE_NEXT_SEQUENCE_RECV - Data type for next sequence recv verification */ + DATA_TYPE_NEXT_SEQUENCE_RECV = 8, + + /** DATA_TYPE_HEADER - Data type for header verification */ + DATA_TYPE_HEADER = 9, + UNRECOGNIZED = -1, +} +export const DataTypeSDKType = DataType; +export function dataTypeFromJSON(object: any): DataType { + switch (object) { + case 0: + case "DATA_TYPE_UNINITIALIZED_UNSPECIFIED": + return DataType.DATA_TYPE_UNINITIALIZED_UNSPECIFIED; + + case 1: + case "DATA_TYPE_CLIENT_STATE": + return DataType.DATA_TYPE_CLIENT_STATE; + + case 2: + case "DATA_TYPE_CONSENSUS_STATE": + return DataType.DATA_TYPE_CONSENSUS_STATE; + + case 3: + case "DATA_TYPE_CONNECTION_STATE": + return DataType.DATA_TYPE_CONNECTION_STATE; + + case 4: + case "DATA_TYPE_CHANNEL_STATE": + return DataType.DATA_TYPE_CHANNEL_STATE; + + case 5: + case "DATA_TYPE_PACKET_COMMITMENT": + return DataType.DATA_TYPE_PACKET_COMMITMENT; + + case 6: + case "DATA_TYPE_PACKET_ACKNOWLEDGEMENT": + return DataType.DATA_TYPE_PACKET_ACKNOWLEDGEMENT; + + case 7: + case "DATA_TYPE_PACKET_RECEIPT_ABSENCE": + return DataType.DATA_TYPE_PACKET_RECEIPT_ABSENCE; + + case 8: + case "DATA_TYPE_NEXT_SEQUENCE_RECV": + return DataType.DATA_TYPE_NEXT_SEQUENCE_RECV; + + case 9: + case "DATA_TYPE_HEADER": + return DataType.DATA_TYPE_HEADER; + + case -1: + case "UNRECOGNIZED": + default: + return DataType.UNRECOGNIZED; + } +} +export function dataTypeToJSON(object: DataType): string { + switch (object) { + case DataType.DATA_TYPE_UNINITIALIZED_UNSPECIFIED: + return "DATA_TYPE_UNINITIALIZED_UNSPECIFIED"; + + case DataType.DATA_TYPE_CLIENT_STATE: + return "DATA_TYPE_CLIENT_STATE"; + + case 
DataType.DATA_TYPE_CONSENSUS_STATE: + return "DATA_TYPE_CONSENSUS_STATE"; + + case DataType.DATA_TYPE_CONNECTION_STATE: + return "DATA_TYPE_CONNECTION_STATE"; + + case DataType.DATA_TYPE_CHANNEL_STATE: + return "DATA_TYPE_CHANNEL_STATE"; + + case DataType.DATA_TYPE_PACKET_COMMITMENT: + return "DATA_TYPE_PACKET_COMMITMENT"; + + case DataType.DATA_TYPE_PACKET_ACKNOWLEDGEMENT: + return "DATA_TYPE_PACKET_ACKNOWLEDGEMENT"; + + case DataType.DATA_TYPE_PACKET_RECEIPT_ABSENCE: + return "DATA_TYPE_PACKET_RECEIPT_ABSENCE"; + + case DataType.DATA_TYPE_NEXT_SEQUENCE_RECV: + return "DATA_TYPE_NEXT_SEQUENCE_RECV"; + + case DataType.DATA_TYPE_HEADER: + return "DATA_TYPE_HEADER"; + + case DataType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ + +export interface ClientState { + /** latest sequence of the client state */ + sequence: Long; + /** frozen sequence of the solo machine */ + + isFrozen: boolean; + consensusState?: ConsensusState; + /** + * when set to true, will allow governance to update a solo machine client. + * The client will be unfrozen if it is frozen. + */ + + allowUpdateAfterProposal: boolean; +} +/** + * ClientState defines a solo machine client that tracks the current consensus + * state and if the client is frozen. + */ + +export interface ClientStateSDKType { + sequence: Long; + is_frozen: boolean; + consensus_state?: ConsensusStateSDKType; + allow_update_after_proposal: boolean; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ + +export interface ConsensusState { + /** public key of the solo machine */ + publicKey?: Any; + /** + * diversifier allows the same public key to be re-used across different solo + * machine clients (potentially on different chains) without being considered + * misbehaviour. + */ + + diversifier: string; + timestamp: Long; +} +/** + * ConsensusState defines a solo machine consensus state. The sequence of a + * consensus state is contained in the "height" key used in storing the + * consensus state. + */ + +export interface ConsensusStateSDKType { + public_key?: AnySDKType; + diversifier: string; + timestamp: Long; +} +/** Header defines a solo machine consensus header */ + +export interface Header { + /** sequence to update solo machine public key at */ + sequence: Long; + timestamp: Long; + signature: Uint8Array; + newPublicKey?: Any; + newDiversifier: string; +} +/** Header defines a solo machine consensus header */ + +export interface HeaderSDKType { + sequence: Long; + timestamp: Long; + signature: Uint8Array; + new_public_key?: AnySDKType; + new_diversifier: string; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. + */ + +export interface Misbehaviour { + clientId: string; + sequence: Long; + signatureOne?: SignatureAndData; + signatureTwo?: SignatureAndData; +} +/** + * Misbehaviour defines misbehaviour for a solo machine which consists + * of a sequence and two signatures over different messages at that sequence. 
+ */ + +export interface MisbehaviourSDKType { + client_id: string; + sequence: Long; + signature_one?: SignatureAndDataSDKType; + signature_two?: SignatureAndDataSDKType; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ + +export interface SignatureAndData { + signature: Uint8Array; + dataType: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * SignatureAndData contains a signature and the data signed over to create that + * signature. + */ + +export interface SignatureAndDataSDKType { + signature: Uint8Array; + data_type: DataType; + data: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ + +export interface TimestampedSignatureData { + signatureData: Uint8Array; + timestamp: Long; +} +/** + * TimestampedSignatureData contains the signature data and the timestamp of the + * signature. + */ + +export interface TimestampedSignatureDataSDKType { + signature_data: Uint8Array; + timestamp: Long; +} +/** SignBytes defines the signed bytes used for signature verification. */ + +export interface SignBytes { + sequence: Long; + timestamp: Long; + diversifier: string; + /** type of the data used */ + + dataType: DataType; + /** marshaled data */ + + data: Uint8Array; +} +/** SignBytes defines the signed bytes used for signature verification. */ + +export interface SignBytesSDKType { + sequence: Long; + timestamp: Long; + diversifier: string; + data_type: DataType; + data: Uint8Array; +} +/** HeaderData returns the SignBytes data for update verification. */ + +export interface HeaderData { + /** header public key */ + newPubKey?: Any; + /** header diversifier */ + + newDiversifier: string; +} +/** HeaderData returns the SignBytes data for update verification. */ + +export interface HeaderDataSDKType { + new_pub_key?: AnySDKType; + new_diversifier: string; +} +/** ClientStateData returns the SignBytes data for client state verification. */ + +export interface ClientStateData { + path: Uint8Array; + clientState?: Any; +} +/** ClientStateData returns the SignBytes data for client state verification. */ + +export interface ClientStateDataSDKType { + path: Uint8Array; + client_state?: AnySDKType; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ + +export interface ConsensusStateData { + path: Uint8Array; + consensusState?: Any; +} +/** + * ConsensusStateData returns the SignBytes data for consensus state + * verification. + */ + +export interface ConsensusStateDataSDKType { + path: Uint8Array; + consensus_state?: AnySDKType; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ + +export interface ConnectionStateData { + path: Uint8Array; + connection?: ConnectionEnd; +} +/** + * ConnectionStateData returns the SignBytes data for connection state + * verification. + */ + +export interface ConnectionStateDataSDKType { + path: Uint8Array; + connection?: ConnectionEndSDKType; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ + +export interface ChannelStateData { + path: Uint8Array; + channel?: Channel; +} +/** + * ChannelStateData returns the SignBytes data for channel state + * verification. + */ + +export interface ChannelStateDataSDKType { + path: Uint8Array; + channel?: ChannelSDKType; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. 
+ */ + +export interface PacketCommitmentData { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketCommitmentData returns the SignBytes data for packet commitment + * verification. + */ + +export interface PacketCommitmentDataSDKType { + path: Uint8Array; + commitment: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ + +export interface PacketAcknowledgementData { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketAcknowledgementData returns the SignBytes data for acknowledgement + * verification. + */ + +export interface PacketAcknowledgementDataSDKType { + path: Uint8Array; + acknowledgement: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ + +export interface PacketReceiptAbsenceData { + path: Uint8Array; +} +/** + * PacketReceiptAbsenceData returns the SignBytes data for + * packet receipt absence verification. + */ + +export interface PacketReceiptAbsenceDataSDKType { + path: Uint8Array; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ + +export interface NextSequenceRecvData { + path: Uint8Array; + nextSeqRecv: Long; +} +/** + * NextSequenceRecvData returns the SignBytes data for verification of the next + * sequence to be received. + */ + +export interface NextSequenceRecvDataSDKType { + path: Uint8Array; + next_seq_recv: Long; +} + +function createBaseClientState(): ClientState { + return { + sequence: Long.UZERO, + isFrozen: false, + consensusState: undefined, + allowUpdateAfterProposal: false + }; +} + +export const ClientState = { + encode(message: ClientState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (message.isFrozen === true) { + writer.uint32(16).bool(message.isFrozen); + } + + if (message.consensusState !== undefined) { + ConsensusState.encode(message.consensusState, writer.uint32(26).fork()).ldelim(); + } + + if (message.allowUpdateAfterProposal === true) { + writer.uint32(32).bool(message.allowUpdateAfterProposal); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.isFrozen = reader.bool(); + break; + + case 3: + message.consensusState = ConsensusState.decode(reader, reader.uint32()); + break; + + case 4: + message.allowUpdateAfterProposal = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientState { + const message = createBaseClientState(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.isFrozen = object.isFrozen ?? false; + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? ConsensusState.fromPartial(object.consensusState) : undefined; + message.allowUpdateAfterProposal = object.allowUpdateAfterProposal ?? 
false; + return message; + } + +}; + +function createBaseConsensusState(): ConsensusState { + return { + publicKey: undefined, + diversifier: "", + timestamp: Long.UZERO + }; +} + +export const ConsensusState = { + encode(message: ConsensusState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.diversifier !== "") { + writer.uint32(18).string(message.diversifier); + } + + if (!message.timestamp.isZero()) { + writer.uint32(24).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.diversifier = reader.string(); + break; + + case 3: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusState { + const message = createBaseConsensusState(); + message.publicKey = object.publicKey !== undefined && object.publicKey !== null ? Any.fromPartial(object.publicKey) : undefined; + message.diversifier = object.diversifier ?? ""; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseHeader(): Header { + return { + sequence: Long.UZERO, + timestamp: Long.UZERO, + signature: new Uint8Array(), + newPublicKey: undefined, + newDiversifier: "" + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + if (message.signature.length !== 0) { + writer.uint32(26).bytes(message.signature); + } + + if (message.newPublicKey !== undefined) { + Any.encode(message.newPublicKey, writer.uint32(34).fork()).ldelim(); + } + + if (message.newDiversifier !== "") { + writer.uint32(42).string(message.newDiversifier); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + case 3: + message.signature = reader.bytes(); + break; + + case 4: + message.newPublicKey = Any.decode(reader, reader.uint32()); + break; + + case 5: + message.newDiversifier = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial
): Header { + const message = createBaseHeader(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + message.signature = object.signature ?? new Uint8Array(); + message.newPublicKey = object.newPublicKey !== undefined && object.newPublicKey !== null ? Any.fromPartial(object.newPublicKey) : undefined; + message.newDiversifier = object.newDiversifier ?? ""; + return message; + } + +}; + +function createBaseMisbehaviour(): Misbehaviour { + return { + clientId: "", + sequence: Long.UZERO, + signatureOne: undefined, + signatureTwo: undefined + }; +} + +export const Misbehaviour = { + encode(message: Misbehaviour, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (!message.sequence.isZero()) { + writer.uint32(16).uint64(message.sequence); + } + + if (message.signatureOne !== undefined) { + SignatureAndData.encode(message.signatureOne, writer.uint32(26).fork()).ldelim(); + } + + if (message.signatureTwo !== undefined) { + SignatureAndData.encode(message.signatureTwo, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMisbehaviour(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.sequence = (reader.uint64() as Long); + break; + + case 3: + message.signatureOne = SignatureAndData.decode(reader, reader.uint32()); + break; + + case 4: + message.signatureTwo = SignatureAndData.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Misbehaviour { + const message = createBaseMisbehaviour(); + message.clientId = object.clientId ?? ""; + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.signatureOne = object.signatureOne !== undefined && object.signatureOne !== null ? SignatureAndData.fromPartial(object.signatureOne) : undefined; + message.signatureTwo = object.signatureTwo !== undefined && object.signatureTwo !== null ? SignatureAndData.fromPartial(object.signatureTwo) : undefined; + return message; + } + +}; + +function createBaseSignatureAndData(): SignatureAndData { + return { + signature: new Uint8Array(), + dataType: 0, + data: new Uint8Array(), + timestamp: Long.UZERO + }; +} + +export const SignatureAndData = { + encode(message: SignatureAndData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signature.length !== 0) { + writer.uint32(10).bytes(message.signature); + } + + if (message.dataType !== 0) { + writer.uint32(16).int32(message.dataType); + } + + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + + if (!message.timestamp.isZero()) { + writer.uint32(32).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureAndData { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureAndData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signature = reader.bytes(); + break; + + case 2: + message.dataType = (reader.int32() as any); + break; + + case 3: + message.data = reader.bytes(); + break; + + case 4: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignatureAndData { + const message = createBaseSignatureAndData(); + message.signature = object.signature ?? new Uint8Array(); + message.dataType = object.dataType ?? 0; + message.data = object.data ?? new Uint8Array(); + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseTimestampedSignatureData(): TimestampedSignatureData { + return { + signatureData: new Uint8Array(), + timestamp: Long.UZERO + }; +} + +export const TimestampedSignatureData = { + encode(message: TimestampedSignatureData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signatureData.length !== 0) { + writer.uint32(10).bytes(message.signatureData); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TimestampedSignatureData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestampedSignatureData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signatureData = reader.bytes(); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TimestampedSignatureData { + const message = createBaseTimestampedSignatureData(); + message.signatureData = object.signatureData ?? new Uint8Array(); + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + return message; + } + +}; + +function createBaseSignBytes(): SignBytes { + return { + sequence: Long.UZERO, + timestamp: Long.UZERO, + diversifier: "", + dataType: 0, + data: new Uint8Array() + }; +} + +export const SignBytes = { + encode(message: SignBytes, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.sequence.isZero()) { + writer.uint32(8).uint64(message.sequence); + } + + if (!message.timestamp.isZero()) { + writer.uint32(16).uint64(message.timestamp); + } + + if (message.diversifier !== "") { + writer.uint32(26).string(message.diversifier); + } + + if (message.dataType !== 0) { + writer.uint32(32).int32(message.dataType); + } + + if (message.data.length !== 0) { + writer.uint32(42).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignBytes { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignBytes(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.sequence = (reader.uint64() as Long); + break; + + case 2: + message.timestamp = (reader.uint64() as Long); + break; + + case 3: + message.diversifier = reader.string(); + break; + + case 4: + message.dataType = (reader.int32() as any); + break; + + case 5: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignBytes { + const message = createBaseSignBytes(); + message.sequence = object.sequence !== undefined && object.sequence !== null ? Long.fromValue(object.sequence) : Long.UZERO; + message.timestamp = object.timestamp !== undefined && object.timestamp !== null ? Long.fromValue(object.timestamp) : Long.UZERO; + message.diversifier = object.diversifier ?? ""; + message.dataType = object.dataType ?? 0; + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseHeaderData(): HeaderData { + return { + newPubKey: undefined, + newDiversifier: "" + }; +} + +export const HeaderData = { + encode(message: HeaderData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.newPubKey !== undefined) { + Any.encode(message.newPubKey, writer.uint32(10).fork()).ldelim(); + } + + if (message.newDiversifier !== "") { + writer.uint32(18).string(message.newDiversifier); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HeaderData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeaderData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.newPubKey = Any.decode(reader, reader.uint32()); + break; + + case 2: + message.newDiversifier = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): HeaderData { + const message = createBaseHeaderData(); + message.newPubKey = object.newPubKey !== undefined && object.newPubKey !== null ? Any.fromPartial(object.newPubKey) : undefined; + message.newDiversifier = object.newDiversifier ?? ""; + return message; + } + +}; + +function createBaseClientStateData(): ClientStateData { + return { + path: new Uint8Array(), + clientState: undefined + }; +} + +export const ClientStateData = { + encode(message: ClientStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.clientState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientStateData { + const message = createBaseClientStateData(); + message.path = object.path ?? new Uint8Array(); + message.clientState = object.clientState !== undefined && object.clientState !== null ? Any.fromPartial(object.clientState) : undefined; + return message; + } + +}; + +function createBaseConsensusStateData(): ConsensusStateData { + return { + path: new Uint8Array(), + consensusState: undefined + }; +} + +export const ConsensusStateData = { + encode(message: ConsensusStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusStateData { + const message = createBaseConsensusStateData(); + message.path = object.path ?? new Uint8Array(); + message.consensusState = object.consensusState !== undefined && object.consensusState !== null ? Any.fromPartial(object.consensusState) : undefined; + return message; + } + +}; + +function createBaseConnectionStateData(): ConnectionStateData { + return { + path: new Uint8Array(), + connection: undefined + }; +} + +export const ConnectionStateData = { + encode(message: ConnectionStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.connection !== undefined) { + ConnectionEnd.encode(message.connection, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConnectionStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.connection = ConnectionEnd.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConnectionStateData { + const message = createBaseConnectionStateData(); + message.path = object.path ?? new Uint8Array(); + message.connection = object.connection !== undefined && object.connection !== null ? 
ConnectionEnd.fromPartial(object.connection) : undefined; + return message; + } + +}; + +function createBaseChannelStateData(): ChannelStateData { + return { + path: new Uint8Array(), + channel: undefined + }; +} + +export const ChannelStateData = { + encode(message: ChannelStateData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.channel !== undefined) { + Channel.encode(message.channel, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ChannelStateData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChannelStateData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.channel = Channel.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ChannelStateData { + const message = createBaseChannelStateData(); + message.path = object.path ?? new Uint8Array(); + message.channel = object.channel !== undefined && object.channel !== null ? Channel.fromPartial(object.channel) : undefined; + return message; + } + +}; + +function createBasePacketCommitmentData(): PacketCommitmentData { + return { + path: new Uint8Array(), + commitment: new Uint8Array() + }; +} + +export const PacketCommitmentData = { + encode(message: PacketCommitmentData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.commitment.length !== 0) { + writer.uint32(18).bytes(message.commitment); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketCommitmentData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketCommitmentData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.commitment = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketCommitmentData { + const message = createBasePacketCommitmentData(); + message.path = object.path ?? new Uint8Array(); + message.commitment = object.commitment ?? new Uint8Array(); + return message; + } + +}; + +function createBasePacketAcknowledgementData(): PacketAcknowledgementData { + return { + path: new Uint8Array(), + acknowledgement: new Uint8Array() + }; +} + +export const PacketAcknowledgementData = { + encode(message: PacketAcknowledgementData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (message.acknowledgement.length !== 0) { + writer.uint32(18).bytes(message.acknowledgement); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketAcknowledgementData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePacketAcknowledgementData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.acknowledgement = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketAcknowledgementData { + const message = createBasePacketAcknowledgementData(); + message.path = object.path ?? new Uint8Array(); + message.acknowledgement = object.acknowledgement ?? new Uint8Array(); + return message; + } + +}; + +function createBasePacketReceiptAbsenceData(): PacketReceiptAbsenceData { + return { + path: new Uint8Array() + }; +} + +export const PacketReceiptAbsenceData = { + encode(message: PacketReceiptAbsenceData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PacketReceiptAbsenceData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketReceiptAbsenceData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PacketReceiptAbsenceData { + const message = createBasePacketReceiptAbsenceData(); + message.path = object.path ?? new Uint8Array(); + return message; + } + +}; + +function createBaseNextSequenceRecvData(): NextSequenceRecvData { + return { + path: new Uint8Array(), + nextSeqRecv: Long.UZERO + }; +} + +export const NextSequenceRecvData = { + encode(message: NextSequenceRecvData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path.length !== 0) { + writer.uint32(10).bytes(message.path); + } + + if (!message.nextSeqRecv.isZero()) { + writer.uint32(16).uint64(message.nextSeqRecv); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NextSequenceRecvData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNextSequenceRecvData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.path = reader.bytes(); + break; + + case 2: + message.nextSeqRecv = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NextSequenceRecvData { + const message = createBaseNextSequenceRecvData(); + message.path = object.path ?? new Uint8Array(); + message.nextSeqRecv = object.nextSeqRecv !== undefined && object.nextSeqRecv !== null ? 
Long.fromValue(object.nextSeqRecv) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/lightclients/tendermint/v1/tendermint.ts b/packages/codegen/src/ibc/lightclients/tendermint/v1/tendermint.ts new file mode 100644 index 00000000..28e6c0f7 --- /dev/null +++ b/packages/codegen/src/ibc/lightclients/tendermint/v1/tendermint.ts @@ -0,0 +1,585 @@ +import { Duration, DurationSDKType } from "../../../../google/protobuf/duration"; +import { Height, HeightSDKType } from "../../../core/client/v1/client"; +import { ProofSpec, ProofSpecSDKType } from "../../../../confio/proofs"; +import { Timestamp } from "../../../../google/protobuf/timestamp"; +import { MerkleRoot, MerkleRootSDKType } from "../../../core/commitment/v1/commitment"; +import { SignedHeader, SignedHeaderSDKType } from "../../../../tendermint/types/types"; +import { ValidatorSet, ValidatorSetSDKType } from "../../../../tendermint/types/validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, toTimestamp, fromTimestamp, Long } from "../../../../helpers"; +/** + * ClientState from Tendermint tracks the current validator set, latest height, + * and a possible frozen height. + */ + +export interface ClientState { + chainId: string; + trustLevel?: Fraction; + /** + * duration of the period since the LastestTimestamp during which the + * submitted headers are valid for upgrade + */ + + trustingPeriod?: Duration; + /** duration of the staking unbonding period */ + + unbondingPeriod?: Duration; + /** defines how much new (untrusted) header's Time can drift into the future. */ + + maxClockDrift?: Duration; + /** Block height when the client was frozen due to a misbehaviour */ + + frozenHeight?: Height; + /** Latest height the client was updated to */ + + latestHeight?: Height; + /** Proof specifications used in verifying counterparty state */ + + proofSpecs: ProofSpec[]; + /** + * Path at which next upgraded client will be committed. + * Each element corresponds to the key for a single CommitmentProof in the + * chained proof. NOTE: ClientState must stored under + * `{upgradePath}/{upgradeHeight}/clientState` ConsensusState must be stored + * under `{upgradepath}/{upgradeHeight}/consensusState` For SDK chains using + * the default upgrade module, upgrade_path should be []string{"upgrade", + * "upgradedIBCState"}` + */ + + upgradePath: string[]; + /** + * This flag, when set to true, will allow governance to recover a client + * which has expired + */ + + allowUpdateAfterExpiry: boolean; + /** + * This flag, when set to true, will allow governance to unfreeze a client + * whose chain has experienced a misbehaviour event + */ + + allowUpdateAfterMisbehaviour: boolean; +} +/** + * ClientState from Tendermint tracks the current validator set, latest height, + * and a possible frozen height. + */ + +export interface ClientStateSDKType { + chain_id: string; + trust_level?: FractionSDKType; + trusting_period?: DurationSDKType; + unbonding_period?: DurationSDKType; + max_clock_drift?: DurationSDKType; + frozen_height?: HeightSDKType; + latest_height?: HeightSDKType; + proof_specs: ProofSpecSDKType[]; + upgrade_path: string[]; + allow_update_after_expiry: boolean; + allow_update_after_misbehaviour: boolean; +} +/** ConsensusState defines the consensus state from Tendermint. */ + +export interface ConsensusState { + /** + * timestamp that corresponds to the block height in which the ConsensusState + * was stored. 
+ */ + timestamp?: Date; + /** commitment root (i.e app hash) */ + + root?: MerkleRoot; + nextValidatorsHash: Uint8Array; +} +/** ConsensusState defines the consensus state from Tendermint. */ + +export interface ConsensusStateSDKType { + timestamp?: Date; + root?: MerkleRootSDKType; + next_validators_hash: Uint8Array; +} +/** + * Misbehaviour is a wrapper over two conflicting Headers + * that implements Misbehaviour interface expected by ICS-02 + */ + +export interface Misbehaviour { + clientId: string; + header1?: Header; + header2?: Header; +} +/** + * Misbehaviour is a wrapper over two conflicting Headers + * that implements Misbehaviour interface expected by ICS-02 + */ + +export interface MisbehaviourSDKType { + client_id: string; + header_1?: HeaderSDKType; + header_2?: HeaderSDKType; +} +/** + * Header defines the Tendermint client consensus Header. + * It encapsulates all the information necessary to update from a trusted + * Tendermint ConsensusState. The inclusion of TrustedHeight and + * TrustedValidators allows this update to process correctly, so long as the + * ConsensusState for the TrustedHeight exists, this removes race conditions + * among relayers The SignedHeader and ValidatorSet are the new untrusted update + * fields for the client. The TrustedHeight is the height of a stored + * ConsensusState on the client that will be used to verify the new untrusted + * header. The Trusted ConsensusState must be within the unbonding period of + * current time in order to correctly verify, and the TrustedValidators must + * hash to TrustedConsensusState.NextValidatorsHash since that is the last + * trusted validator set at the TrustedHeight. + */ + +export interface Header { + signedHeader?: SignedHeader; + validatorSet?: ValidatorSet; + trustedHeight?: Height; + trustedValidators?: ValidatorSet; +} +/** + * Header defines the Tendermint client consensus Header. + * It encapsulates all the information necessary to update from a trusted + * Tendermint ConsensusState. The inclusion of TrustedHeight and + * TrustedValidators allows this update to process correctly, so long as the + * ConsensusState for the TrustedHeight exists, this removes race conditions + * among relayers The SignedHeader and ValidatorSet are the new untrusted update + * fields for the client. The TrustedHeight is the height of a stored + * ConsensusState on the client that will be used to verify the new untrusted + * header. The Trusted ConsensusState must be within the unbonding period of + * current time in order to correctly verify, and the TrustedValidators must + * hash to TrustedConsensusState.NextValidatorsHash since that is the last + * trusted validator set at the TrustedHeight. + */ + +export interface HeaderSDKType { + signed_header?: SignedHeaderSDKType; + validator_set?: ValidatorSetSDKType; + trusted_height?: HeightSDKType; + trusted_validators?: ValidatorSetSDKType; +} +/** + * Fraction defines the protobuf message type for tmmath.Fraction that only + * supports positive values. + */ + +export interface Fraction { + numerator: Long; + denominator: Long; +} +/** + * Fraction defines the protobuf message type for tmmath.Fraction that only + * supports positive values. 
+ */ + +export interface FractionSDKType { + numerator: Long; + denominator: Long; +} + +function createBaseClientState(): ClientState { + return { + chainId: "", + trustLevel: undefined, + trustingPeriod: undefined, + unbondingPeriod: undefined, + maxClockDrift: undefined, + frozenHeight: undefined, + latestHeight: undefined, + proofSpecs: [], + upgradePath: [], + allowUpdateAfterExpiry: false, + allowUpdateAfterMisbehaviour: false + }; +} + +export const ClientState = { + encode(message: ClientState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.chainId !== "") { + writer.uint32(10).string(message.chainId); + } + + if (message.trustLevel !== undefined) { + Fraction.encode(message.trustLevel, writer.uint32(18).fork()).ldelim(); + } + + if (message.trustingPeriod !== undefined) { + Duration.encode(message.trustingPeriod, writer.uint32(26).fork()).ldelim(); + } + + if (message.unbondingPeriod !== undefined) { + Duration.encode(message.unbondingPeriod, writer.uint32(34).fork()).ldelim(); + } + + if (message.maxClockDrift !== undefined) { + Duration.encode(message.maxClockDrift, writer.uint32(42).fork()).ldelim(); + } + + if (message.frozenHeight !== undefined) { + Height.encode(message.frozenHeight, writer.uint32(50).fork()).ldelim(); + } + + if (message.latestHeight !== undefined) { + Height.encode(message.latestHeight, writer.uint32(58).fork()).ldelim(); + } + + for (const v of message.proofSpecs) { + ProofSpec.encode(v!, writer.uint32(66).fork()).ldelim(); + } + + for (const v of message.upgradePath) { + writer.uint32(74).string(v!); + } + + if (message.allowUpdateAfterExpiry === true) { + writer.uint32(80).bool(message.allowUpdateAfterExpiry); + } + + if (message.allowUpdateAfterMisbehaviour === true) { + writer.uint32(88).bool(message.allowUpdateAfterMisbehaviour); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ClientState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.chainId = reader.string(); + break; + + case 2: + message.trustLevel = Fraction.decode(reader, reader.uint32()); + break; + + case 3: + message.trustingPeriod = Duration.decode(reader, reader.uint32()); + break; + + case 4: + message.unbondingPeriod = Duration.decode(reader, reader.uint32()); + break; + + case 5: + message.maxClockDrift = Duration.decode(reader, reader.uint32()); + break; + + case 6: + message.frozenHeight = Height.decode(reader, reader.uint32()); + break; + + case 7: + message.latestHeight = Height.decode(reader, reader.uint32()); + break; + + case 8: + message.proofSpecs.push(ProofSpec.decode(reader, reader.uint32())); + break; + + case 9: + message.upgradePath.push(reader.string()); + break; + + case 10: + message.allowUpdateAfterExpiry = reader.bool(); + break; + + case 11: + message.allowUpdateAfterMisbehaviour = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ClientState { + const message = createBaseClientState(); + message.chainId = object.chainId ?? ""; + message.trustLevel = object.trustLevel !== undefined && object.trustLevel !== null ? Fraction.fromPartial(object.trustLevel) : undefined; + message.trustingPeriod = object.trustingPeriod !== undefined && object.trustingPeriod !== null ? 
Duration.fromPartial(object.trustingPeriod) : undefined; + message.unbondingPeriod = object.unbondingPeriod !== undefined && object.unbondingPeriod !== null ? Duration.fromPartial(object.unbondingPeriod) : undefined; + message.maxClockDrift = object.maxClockDrift !== undefined && object.maxClockDrift !== null ? Duration.fromPartial(object.maxClockDrift) : undefined; + message.frozenHeight = object.frozenHeight !== undefined && object.frozenHeight !== null ? Height.fromPartial(object.frozenHeight) : undefined; + message.latestHeight = object.latestHeight !== undefined && object.latestHeight !== null ? Height.fromPartial(object.latestHeight) : undefined; + message.proofSpecs = object.proofSpecs?.map(e => ProofSpec.fromPartial(e)) || []; + message.upgradePath = object.upgradePath?.map(e => e) || []; + message.allowUpdateAfterExpiry = object.allowUpdateAfterExpiry ?? false; + message.allowUpdateAfterMisbehaviour = object.allowUpdateAfterMisbehaviour ?? false; + return message; + } + +}; + +function createBaseConsensusState(): ConsensusState { + return { + timestamp: undefined, + root: undefined, + nextValidatorsHash: new Uint8Array() + }; +} + +export const ConsensusState = { + encode(message: ConsensusState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(10).fork()).ldelim(); + } + + if (message.root !== undefined) { + MerkleRoot.encode(message.root, writer.uint32(18).fork()).ldelim(); + } + + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(26).bytes(message.nextValidatorsHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusState(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 2: + message.root = MerkleRoot.decode(reader, reader.uint32()); + break; + + case 3: + message.nextValidatorsHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusState { + const message = createBaseConsensusState(); + message.timestamp = object.timestamp ?? undefined; + message.root = object.root !== undefined && object.root !== null ? MerkleRoot.fromPartial(object.root) : undefined; + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + return message; + } + +}; + +function createBaseMisbehaviour(): Misbehaviour { + return { + clientId: "", + header1: undefined, + header2: undefined + }; +} + +export const Misbehaviour = { + encode(message: Misbehaviour, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.clientId !== "") { + writer.uint32(10).string(message.clientId); + } + + if (message.header1 !== undefined) { + Header.encode(message.header1, writer.uint32(18).fork()).ldelim(); + } + + if (message.header2 !== undefined) { + Header.encode(message.header2, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehaviour { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMisbehaviour(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + + case 2: + message.header1 = Header.decode(reader, reader.uint32()); + break; + + case 3: + message.header2 = Header.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Misbehaviour { + const message = createBaseMisbehaviour(); + message.clientId = object.clientId ?? ""; + message.header1 = object.header1 !== undefined && object.header1 !== null ? Header.fromPartial(object.header1) : undefined; + message.header2 = object.header2 !== undefined && object.header2 !== null ? Header.fromPartial(object.header2) : undefined; + return message; + } + +}; + +function createBaseHeader(): Header { + return { + signedHeader: undefined, + validatorSet: undefined, + trustedHeight: undefined, + trustedValidators: undefined + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signedHeader !== undefined) { + SignedHeader.encode(message.signedHeader, writer.uint32(10).fork()).ldelim(); + } + + if (message.validatorSet !== undefined) { + ValidatorSet.encode(message.validatorSet, writer.uint32(18).fork()).ldelim(); + } + + if (message.trustedHeight !== undefined) { + Height.encode(message.trustedHeight, writer.uint32(26).fork()).ldelim(); + } + + if (message.trustedValidators !== undefined) { + ValidatorSet.encode(message.trustedValidators, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signedHeader = SignedHeader.decode(reader, reader.uint32()); + break; + + case 2: + message.validatorSet = ValidatorSet.decode(reader, reader.uint32()); + break; + + case 3: + message.trustedHeight = Height.decode(reader, reader.uint32()); + break; + + case 4: + message.trustedValidators = ValidatorSet.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial
): Header { + const message = createBaseHeader(); + message.signedHeader = object.signedHeader !== undefined && object.signedHeader !== null ? SignedHeader.fromPartial(object.signedHeader) : undefined; + message.validatorSet = object.validatorSet !== undefined && object.validatorSet !== null ? ValidatorSet.fromPartial(object.validatorSet) : undefined; + message.trustedHeight = object.trustedHeight !== undefined && object.trustedHeight !== null ? Height.fromPartial(object.trustedHeight) : undefined; + message.trustedValidators = object.trustedValidators !== undefined && object.trustedValidators !== null ? ValidatorSet.fromPartial(object.trustedValidators) : undefined; + return message; + } + +}; + +function createBaseFraction(): Fraction { + return { + numerator: Long.UZERO, + denominator: Long.UZERO + }; +} + +export const Fraction = { + encode(message: Fraction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.numerator.isZero()) { + writer.uint32(8).uint64(message.numerator); + } + + if (!message.denominator.isZero()) { + writer.uint32(16).uint64(message.denominator); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Fraction { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFraction(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.numerator = (reader.uint64() as Long); + break; + + case 2: + message.denominator = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Fraction { + const message = createBaseFraction(); + message.numerator = object.numerator !== undefined && object.numerator !== null ? Long.fromValue(object.numerator) : Long.UZERO; + message.denominator = object.denominator !== undefined && object.denominator !== null ? 
Long.fromValue(object.denominator) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/rpc.query.ts b/packages/codegen/src/ibc/rpc.query.ts new file mode 100644 index 00000000..0d0dbcd9 --- /dev/null +++ b/packages/codegen/src/ibc/rpc.query.ts @@ -0,0 +1,89 @@ +import { Tendermint34Client, HttpEndpoint } from "@cosmjs/tendermint-rpc"; +import { QueryClient } from "@cosmjs/stargate"; +export const createRPCQueryClient = async ({ + rpcEndpoint +}: { + rpcEndpoint: string | HttpEndpoint; +}) => { + const tmClient = await Tendermint34Client.connect(rpcEndpoint); + const client = new QueryClient(tmClient); + return { + cosmos: { + app: { + v1alpha1: (await import("../cosmos/app/v1alpha1/query.rpc.Query")).createRpcQueryExtension(client) + }, + auth: { + v1beta1: (await import("../cosmos/auth/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + authz: { + v1beta1: (await import("../cosmos/authz/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + bank: { + v1beta1: (await import("../cosmos/bank/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + base: { + tendermint: { + v1beta1: (await import("../cosmos/base/tendermint/v1beta1/query.rpc.Service")).createRpcQueryExtension(client) + } + }, + distribution: { + v1beta1: (await import("../cosmos/distribution/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + evidence: { + v1beta1: (await import("../cosmos/evidence/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + feegrant: { + v1beta1: (await import("../cosmos/feegrant/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + gov: { + v1: (await import("../cosmos/gov/v1/query.rpc.Query")).createRpcQueryExtension(client), + v1beta1: (await import("../cosmos/gov/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + group: { + v1: (await import("../cosmos/group/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + mint: { + v1beta1: (await import("../cosmos/mint/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + nft: { + v1beta1: (await import("../cosmos/nft/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + params: { + v1beta1: (await import("../cosmos/params/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + slashing: { + v1beta1: (await import("../cosmos/slashing/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + staking: { + v1beta1: (await import("../cosmos/staking/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + }, + tx: { + v1beta1: (await import("../cosmos/tx/v1beta1/service.rpc.Service")).createRpcQueryExtension(client) + }, + upgrade: { + v1beta1: (await import("../cosmos/upgrade/v1beta1/query.rpc.Query")).createRpcQueryExtension(client) + } + }, + ibc: { + applications: { + transfer: { + v1: (await import("./applications/transfer/v1/query.rpc.Query")).createRpcQueryExtension(client) + } + }, + core: { + channel: { + v1: (await import("./core/channel/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + client: { + v1: (await import("./core/client/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + connection: { + v1: (await import("./core/connection/v1/query.rpc.Query")).createRpcQueryExtension(client) + }, + port: { + v1: (await import("./core/port/v1/query.rpc.Query")).createRpcQueryExtension(client) + } + } + } + }; +}; \ No newline at end of file diff --git a/packages/codegen/src/ibc/rpc.tx.ts b/packages/codegen/src/ibc/rpc.tx.ts new file mode 100644 
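Note: the generated createRPCQueryClient above lazily imports one query extension per module and wires them all onto a single Tendermint connection. A minimal usage sketch follows; it is illustrative only, not part of the generated output. The endpoint URL and bech32 address are placeholders, the import path depends on how the package is consumed, and the extension method names (allBalances, channels) are assumed from the generated *.rpc.Query files, which are not shown in this hunk.

import { createRPCQueryClient } from "./src/ibc/rpc.query"; // adjust path to wherever the codegen output is consumed

const main = async () => {
  // Connects a Tendermint34Client internally and attaches every generated query extension.
  const { cosmos, ibc } = await createRPCQueryClient({
    rpcEndpoint: "https://rpc.example.org" // placeholder RPC endpoint
  });

  // Each namespace mirrors the proto package layout, e.g. cosmos.bank.v1beta1 and ibc.core.channel.v1.
  const balances = await cosmos.bank.v1beta1.allBalances({ address: "cosmos1..." }); // placeholder address
  const channels = await ibc.core.channel.v1.channels({});
  console.log(balances, channels);
};

main().catch(console.error);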
index 00000000..62cb610c --- /dev/null +++ b/packages/codegen/src/ibc/rpc.tx.ts @@ -0,0 +1,67 @@ +import { Rpc } from "../helpers"; +export const createRPCMsgClient = async ({ + rpc +}: { + rpc: Rpc; +}) => ({ + cosmos: { + authz: { + v1beta1: new (await import("../cosmos/authz/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + bank: { + v1beta1: new (await import("../cosmos/bank/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + crisis: { + v1beta1: new (await import("../cosmos/crisis/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + distribution: { + v1beta1: new (await import("../cosmos/distribution/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + evidence: { + v1beta1: new (await import("../cosmos/evidence/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + feegrant: { + v1beta1: new (await import("../cosmos/feegrant/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + gov: { + v1: new (await import("../cosmos/gov/v1/tx.rpc.msg")).MsgClientImpl(rpc), + v1beta1: new (await import("../cosmos/gov/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + group: { + v1: new (await import("../cosmos/group/v1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + nft: { + v1beta1: new (await import("../cosmos/nft/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + slashing: { + v1beta1: new (await import("../cosmos/slashing/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + staking: { + v1beta1: new (await import("../cosmos/staking/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + upgrade: { + v1beta1: new (await import("../cosmos/upgrade/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + vesting: { + v1beta1: new (await import("../cosmos/vesting/v1beta1/tx.rpc.msg")).MsgClientImpl(rpc) + } + }, + ibc: { + applications: { + transfer: { + v1: new (await import("./applications/transfer/v1/tx.rpc.msg")).MsgClientImpl(rpc) + } + }, + core: { + channel: { + v1: new (await import("./core/channel/v1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + client: { + v1: new (await import("./core/client/v1/tx.rpc.msg")).MsgClientImpl(rpc) + }, + connection: { + v1: new (await import("./core/connection/v1/tx.rpc.msg")).MsgClientImpl(rpc) + } + } + } +}); \ No newline at end of file diff --git a/packages/codegen/src/ics23/bundle.ts b/packages/codegen/src/ics23/bundle.ts new file mode 100644 index 00000000..0a8f5bd2 --- /dev/null +++ b/packages/codegen/src/ics23/bundle.ts @@ -0,0 +1,3 @@ +import * as _1 from "../confio/proofs"; +export const ics23 = { ..._1 +}; \ No newline at end of file diff --git a/packages/codegen/src/index.ts b/packages/codegen/src/index.ts new file mode 100644 index 00000000..30d7b52e --- /dev/null +++ b/packages/codegen/src/index.ts @@ -0,0 +1,15 @@ +/** + * This file and any referenced files were automatically generated by @osmonauts/telescope@0.88.2 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. 
+ */ + +export * from "./amino/bundle"; +export * from "./ics23/bundle"; +export * from "./cosmos_proto/bundle"; +export * from "./cosmos/bundle"; +export * from "./cosmwasm/bundle"; +export * from "./gogoproto/bundle"; +export * from "./google/bundle"; +export * from "./ibc/bundle"; +export * from "./tendermint/bundle"; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/abci/types.ts b/packages/codegen/src/tendermint/abci/types.ts new file mode 100644 index 00000000..2bc18c1a --- /dev/null +++ b/packages/codegen/src/tendermint/abci/types.ts @@ -0,0 +1,3846 @@ +import { Timestamp } from "../../google/protobuf/timestamp"; +import { Header, HeaderSDKType } from "../types/types"; +import { ProofOps, ProofOpsSDKType } from "../crypto/proof"; +import { EvidenceParams, EvidenceParamsSDKType, ValidatorParams, ValidatorParamsSDKType, VersionParams, VersionParamsSDKType } from "../types/params"; +import { PublicKey, PublicKeySDKType } from "../crypto/keys"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long, toTimestamp, fromTimestamp } from "../../helpers"; +export enum CheckTxType { + NEW = 0, + RECHECK = 1, + UNRECOGNIZED = -1, +} +export const CheckTxTypeSDKType = CheckTxType; +export function checkTxTypeFromJSON(object: any): CheckTxType { + switch (object) { + case 0: + case "NEW": + return CheckTxType.NEW; + + case 1: + case "RECHECK": + return CheckTxType.RECHECK; + + case -1: + case "UNRECOGNIZED": + default: + return CheckTxType.UNRECOGNIZED; + } +} +export function checkTxTypeToJSON(object: CheckTxType): string { + switch (object) { + case CheckTxType.NEW: + return "NEW"; + + case CheckTxType.RECHECK: + return "RECHECK"; + + case CheckTxType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum ResponseOfferSnapshot_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + + /** ACCEPT - Snapshot accepted, apply chunks */ + ACCEPT = 1, + + /** ABORT - Abort all snapshot restoration */ + ABORT = 2, + + /** REJECT - Reject this specific snapshot, try others */ + REJECT = 3, + + /** REJECT_FORMAT - Reject all snapshots of this format, try others */ + REJECT_FORMAT = 4, + + /** REJECT_SENDER - Reject all snapshots from the sender(s), try others */ + REJECT_SENDER = 5, + UNRECOGNIZED = -1, +} +export const ResponseOfferSnapshot_ResultSDKType = ResponseOfferSnapshot_Result; +export function responseOfferSnapshot_ResultFromJSON(object: any): ResponseOfferSnapshot_Result { + switch (object) { + case 0: + case "UNKNOWN": + return ResponseOfferSnapshot_Result.UNKNOWN; + + case 1: + case "ACCEPT": + return ResponseOfferSnapshot_Result.ACCEPT; + + case 2: + case "ABORT": + return ResponseOfferSnapshot_Result.ABORT; + + case 3: + case "REJECT": + return ResponseOfferSnapshot_Result.REJECT; + + case 4: + case "REJECT_FORMAT": + return ResponseOfferSnapshot_Result.REJECT_FORMAT; + + case 5: + case "REJECT_SENDER": + return ResponseOfferSnapshot_Result.REJECT_SENDER; + + case -1: + case "UNRECOGNIZED": + default: + return ResponseOfferSnapshot_Result.UNRECOGNIZED; + } +} +export function responseOfferSnapshot_ResultToJSON(object: ResponseOfferSnapshot_Result): string { + switch (object) { + case ResponseOfferSnapshot_Result.UNKNOWN: + return "UNKNOWN"; + + case ResponseOfferSnapshot_Result.ACCEPT: + return "ACCEPT"; + + case ResponseOfferSnapshot_Result.ABORT: + return "ABORT"; + + case ResponseOfferSnapshot_Result.REJECT: + return "REJECT"; + + case ResponseOfferSnapshot_Result.REJECT_FORMAT: + return 
"REJECT_FORMAT"; + + case ResponseOfferSnapshot_Result.REJECT_SENDER: + return "REJECT_SENDER"; + + case ResponseOfferSnapshot_Result.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum ResponseApplySnapshotChunk_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + + /** ACCEPT - Chunk successfully accepted */ + ACCEPT = 1, + + /** ABORT - Abort all snapshot restoration */ + ABORT = 2, + + /** RETRY - Retry chunk (combine with refetch and reject) */ + RETRY = 3, + + /** RETRY_SNAPSHOT - Retry snapshot (combine with refetch and reject) */ + RETRY_SNAPSHOT = 4, + + /** REJECT_SNAPSHOT - Reject this snapshot, try others */ + REJECT_SNAPSHOT = 5, + UNRECOGNIZED = -1, +} +export const ResponseApplySnapshotChunk_ResultSDKType = ResponseApplySnapshotChunk_Result; +export function responseApplySnapshotChunk_ResultFromJSON(object: any): ResponseApplySnapshotChunk_Result { + switch (object) { + case 0: + case "UNKNOWN": + return ResponseApplySnapshotChunk_Result.UNKNOWN; + + case 1: + case "ACCEPT": + return ResponseApplySnapshotChunk_Result.ACCEPT; + + case 2: + case "ABORT": + return ResponseApplySnapshotChunk_Result.ABORT; + + case 3: + case "RETRY": + return ResponseApplySnapshotChunk_Result.RETRY; + + case 4: + case "RETRY_SNAPSHOT": + return ResponseApplySnapshotChunk_Result.RETRY_SNAPSHOT; + + case 5: + case "REJECT_SNAPSHOT": + return ResponseApplySnapshotChunk_Result.REJECT_SNAPSHOT; + + case -1: + case "UNRECOGNIZED": + default: + return ResponseApplySnapshotChunk_Result.UNRECOGNIZED; + } +} +export function responseApplySnapshotChunk_ResultToJSON(object: ResponseApplySnapshotChunk_Result): string { + switch (object) { + case ResponseApplySnapshotChunk_Result.UNKNOWN: + return "UNKNOWN"; + + case ResponseApplySnapshotChunk_Result.ACCEPT: + return "ACCEPT"; + + case ResponseApplySnapshotChunk_Result.ABORT: + return "ABORT"; + + case ResponseApplySnapshotChunk_Result.RETRY: + return "RETRY"; + + case ResponseApplySnapshotChunk_Result.RETRY_SNAPSHOT: + return "RETRY_SNAPSHOT"; + + case ResponseApplySnapshotChunk_Result.REJECT_SNAPSHOT: + return "REJECT_SNAPSHOT"; + + case ResponseApplySnapshotChunk_Result.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export enum EvidenceType { + UNKNOWN = 0, + DUPLICATE_VOTE = 1, + LIGHT_CLIENT_ATTACK = 2, + UNRECOGNIZED = -1, +} +export const EvidenceTypeSDKType = EvidenceType; +export function evidenceTypeFromJSON(object: any): EvidenceType { + switch (object) { + case 0: + case "UNKNOWN": + return EvidenceType.UNKNOWN; + + case 1: + case "DUPLICATE_VOTE": + return EvidenceType.DUPLICATE_VOTE; + + case 2: + case "LIGHT_CLIENT_ATTACK": + return EvidenceType.LIGHT_CLIENT_ATTACK; + + case -1: + case "UNRECOGNIZED": + default: + return EvidenceType.UNRECOGNIZED; + } +} +export function evidenceTypeToJSON(object: EvidenceType): string { + switch (object) { + case EvidenceType.UNKNOWN: + return "UNKNOWN"; + + case EvidenceType.DUPLICATE_VOTE: + return "DUPLICATE_VOTE"; + + case EvidenceType.LIGHT_CLIENT_ATTACK: + return "LIGHT_CLIENT_ATTACK"; + + case EvidenceType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +export interface Request { + echo?: RequestEcho; + flush?: RequestFlush; + info?: RequestInfo; + setOption?: RequestSetOption; + initChain?: RequestInitChain; + query?: RequestQuery; + beginBlock?: RequestBeginBlock; + checkTx?: RequestCheckTx; + deliverTx?: RequestDeliverTx; + endBlock?: RequestEndBlock; + commit?: RequestCommit; + listSnapshots?: RequestListSnapshots; + 
offerSnapshot?: RequestOfferSnapshot; + loadSnapshotChunk?: RequestLoadSnapshotChunk; + applySnapshotChunk?: RequestApplySnapshotChunk; +} +export interface RequestSDKType { + echo?: RequestEchoSDKType; + flush?: RequestFlushSDKType; + info?: RequestInfoSDKType; + set_option?: RequestSetOptionSDKType; + init_chain?: RequestInitChainSDKType; + query?: RequestQuerySDKType; + begin_block?: RequestBeginBlockSDKType; + check_tx?: RequestCheckTxSDKType; + deliver_tx?: RequestDeliverTxSDKType; + end_block?: RequestEndBlockSDKType; + commit?: RequestCommitSDKType; + list_snapshots?: RequestListSnapshotsSDKType; + offer_snapshot?: RequestOfferSnapshotSDKType; + load_snapshot_chunk?: RequestLoadSnapshotChunkSDKType; + apply_snapshot_chunk?: RequestApplySnapshotChunkSDKType; +} +export interface RequestEcho { + message: string; +} +export interface RequestEchoSDKType { + message: string; +} +export interface RequestFlush {} +export interface RequestFlushSDKType {} +export interface RequestInfo { + version: string; + blockVersion: Long; + p2pVersion: Long; +} +export interface RequestInfoSDKType { + version: string; + block_version: Long; + p2p_version: Long; +} +/** nondeterministic */ + +export interface RequestSetOption { + key: string; + value: string; +} +/** nondeterministic */ + +export interface RequestSetOptionSDKType { + key: string; + value: string; +} +export interface RequestInitChain { + time?: Date; + chainId: string; + consensusParams?: ConsensusParams; + validators: ValidatorUpdate[]; + appStateBytes: Uint8Array; + initialHeight: Long; +} +export interface RequestInitChainSDKType { + time?: Date; + chain_id: string; + consensus_params?: ConsensusParamsSDKType; + validators: ValidatorUpdateSDKType[]; + app_state_bytes: Uint8Array; + initial_height: Long; +} +export interface RequestQuery { + data: Uint8Array; + path: string; + height: Long; + prove: boolean; +} +export interface RequestQuerySDKType { + data: Uint8Array; + path: string; + height: Long; + prove: boolean; +} +export interface RequestBeginBlock { + hash: Uint8Array; + header?: Header; + lastCommitInfo?: LastCommitInfo; + byzantineValidators: Evidence[]; +} +export interface RequestBeginBlockSDKType { + hash: Uint8Array; + header?: HeaderSDKType; + last_commit_info?: LastCommitInfoSDKType; + byzantine_validators: EvidenceSDKType[]; +} +export interface RequestCheckTx { + tx: Uint8Array; + type: CheckTxType; +} +export interface RequestCheckTxSDKType { + tx: Uint8Array; + type: CheckTxType; +} +export interface RequestDeliverTx { + tx: Uint8Array; +} +export interface RequestDeliverTxSDKType { + tx: Uint8Array; +} +export interface RequestEndBlock { + height: Long; +} +export interface RequestEndBlockSDKType { + height: Long; +} +export interface RequestCommit {} +export interface RequestCommitSDKType {} +/** lists available snapshots */ + +export interface RequestListSnapshots {} +/** lists available snapshots */ + +export interface RequestListSnapshotsSDKType {} +/** offers a snapshot to the application */ + +export interface RequestOfferSnapshot { + /** snapshot offered by peers */ + snapshot?: Snapshot; + /** light client-verified app hash for snapshot height */ + + appHash: Uint8Array; +} +/** offers a snapshot to the application */ + +export interface RequestOfferSnapshotSDKType { + snapshot?: SnapshotSDKType; + app_hash: Uint8Array; +} +/** loads a snapshot chunk */ + +export interface RequestLoadSnapshotChunk { + height: Long; + format: number; + chunk: number; +} +/** loads a snapshot chunk */ + +export interface 
RequestLoadSnapshotChunkSDKType { + height: Long; + format: number; + chunk: number; +} +/** Applies a snapshot chunk */ + +export interface RequestApplySnapshotChunk { + index: number; + chunk: Uint8Array; + sender: string; +} +/** Applies a snapshot chunk */ + +export interface RequestApplySnapshotChunkSDKType { + index: number; + chunk: Uint8Array; + sender: string; +} +export interface Response { + exception?: ResponseException; + echo?: ResponseEcho; + flush?: ResponseFlush; + info?: ResponseInfo; + setOption?: ResponseSetOption; + initChain?: ResponseInitChain; + query?: ResponseQuery; + beginBlock?: ResponseBeginBlock; + checkTx?: ResponseCheckTx; + deliverTx?: ResponseDeliverTx; + endBlock?: ResponseEndBlock; + commit?: ResponseCommit; + listSnapshots?: ResponseListSnapshots; + offerSnapshot?: ResponseOfferSnapshot; + loadSnapshotChunk?: ResponseLoadSnapshotChunk; + applySnapshotChunk?: ResponseApplySnapshotChunk; +} +export interface ResponseSDKType { + exception?: ResponseExceptionSDKType; + echo?: ResponseEchoSDKType; + flush?: ResponseFlushSDKType; + info?: ResponseInfoSDKType; + set_option?: ResponseSetOptionSDKType; + init_chain?: ResponseInitChainSDKType; + query?: ResponseQuerySDKType; + begin_block?: ResponseBeginBlockSDKType; + check_tx?: ResponseCheckTxSDKType; + deliver_tx?: ResponseDeliverTxSDKType; + end_block?: ResponseEndBlockSDKType; + commit?: ResponseCommitSDKType; + list_snapshots?: ResponseListSnapshotsSDKType; + offer_snapshot?: ResponseOfferSnapshotSDKType; + load_snapshot_chunk?: ResponseLoadSnapshotChunkSDKType; + apply_snapshot_chunk?: ResponseApplySnapshotChunkSDKType; +} +/** nondeterministic */ + +export interface ResponseException { + error: string; +} +/** nondeterministic */ + +export interface ResponseExceptionSDKType { + error: string; +} +export interface ResponseEcho { + message: string; +} +export interface ResponseEchoSDKType { + message: string; +} +export interface ResponseFlush {} +export interface ResponseFlushSDKType {} +export interface ResponseInfo { + data: string; + version: string; + appVersion: Long; + lastBlockHeight: Long; + lastBlockAppHash: Uint8Array; +} +export interface ResponseInfoSDKType { + data: string; + version: string; + app_version: Long; + last_block_height: Long; + last_block_app_hash: Uint8Array; +} +/** nondeterministic */ + +export interface ResponseSetOption { + code: number; + /** bytes data = 2; */ + + log: string; + info: string; +} +/** nondeterministic */ + +export interface ResponseSetOptionSDKType { + code: number; + log: string; + info: string; +} +export interface ResponseInitChain { + consensusParams?: ConsensusParams; + validators: ValidatorUpdate[]; + appHash: Uint8Array; +} +export interface ResponseInitChainSDKType { + consensus_params?: ConsensusParamsSDKType; + validators: ValidatorUpdateSDKType[]; + app_hash: Uint8Array; +} +export interface ResponseQuery { + code: number; + /** bytes data = 2; // use "value" instead. 
*/ + + log: string; + /** nondeterministic */ + + info: string; + index: Long; + key: Uint8Array; + value: Uint8Array; + proofOps?: ProofOps; + height: Long; + codespace: string; +} +export interface ResponseQuerySDKType { + code: number; + log: string; + info: string; + index: Long; + key: Uint8Array; + value: Uint8Array; + proof_ops?: ProofOpsSDKType; + height: Long; + codespace: string; +} +export interface ResponseBeginBlock { + events: Event[]; +} +export interface ResponseBeginBlockSDKType { + events: EventSDKType[]; +} +export interface ResponseCheckTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + + log: string; + /** nondeterministic */ + + info: string; + gasWanted: Long; + gasUsed: Long; + events: Event[]; + codespace: string; +} +export interface ResponseCheckTxSDKType { + code: number; + data: Uint8Array; + log: string; + info: string; + gas_wanted: Long; + gas_used: Long; + events: EventSDKType[]; + codespace: string; +} +export interface ResponseDeliverTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + + log: string; + /** nondeterministic */ + + info: string; + gasWanted: Long; + gasUsed: Long; + events: Event[]; + codespace: string; +} +export interface ResponseDeliverTxSDKType { + code: number; + data: Uint8Array; + log: string; + info: string; + gas_wanted: Long; + gas_used: Long; + events: EventSDKType[]; + codespace: string; +} +export interface ResponseEndBlock { + validatorUpdates: ValidatorUpdate[]; + consensusParamUpdates?: ConsensusParams; + events: Event[]; +} +export interface ResponseEndBlockSDKType { + validator_updates: ValidatorUpdateSDKType[]; + consensus_param_updates?: ConsensusParamsSDKType; + events: EventSDKType[]; +} +export interface ResponseCommit { + /** reserve 1 */ + data: Uint8Array; + retainHeight: Long; +} +export interface ResponseCommitSDKType { + data: Uint8Array; + retain_height: Long; +} +export interface ResponseListSnapshots { + snapshots: Snapshot[]; +} +export interface ResponseListSnapshotsSDKType { + snapshots: SnapshotSDKType[]; +} +export interface ResponseOfferSnapshot { + result: ResponseOfferSnapshot_Result; +} +export interface ResponseOfferSnapshotSDKType { + result: ResponseOfferSnapshot_Result; +} +export interface ResponseLoadSnapshotChunk { + chunk: Uint8Array; +} +export interface ResponseLoadSnapshotChunkSDKType { + chunk: Uint8Array; +} +export interface ResponseApplySnapshotChunk { + result: ResponseApplySnapshotChunk_Result; + /** Chunks to refetch and reapply */ + + refetchChunks: number[]; + /** Chunk senders to reject and ban */ + + rejectSenders: string[]; +} +export interface ResponseApplySnapshotChunkSDKType { + result: ResponseApplySnapshotChunk_Result; + refetch_chunks: number[]; + reject_senders: string[]; +} +/** + * ConsensusParams contains all consensus-relevant parameters + * that can be adjusted by the abci app + */ + +export interface ConsensusParams { + block?: BlockParams; + evidence?: EvidenceParams; + validator?: ValidatorParams; + version?: VersionParams; +} +/** + * ConsensusParams contains all consensus-relevant parameters + * that can be adjusted by the abci app + */ + +export interface ConsensusParamsSDKType { + block?: BlockParamsSDKType; + evidence?: EvidenceParamsSDKType; + validator?: ValidatorParamsSDKType; + version?: VersionParamsSDKType; +} +/** BlockParams contains limits on the block size. 
*/ + +export interface BlockParams { + /** Note: must be greater than 0 */ + maxBytes: Long; + /** Note: must be greater or equal to -1 */ + + maxGas: Long; +} +/** BlockParams contains limits on the block size. */ + +export interface BlockParamsSDKType { + max_bytes: Long; + max_gas: Long; +} +export interface LastCommitInfo { + round: number; + votes: VoteInfo[]; +} +export interface LastCommitInfoSDKType { + round: number; + votes: VoteInfoSDKType[]; +} +/** + * Event allows application developers to attach additional information to + * ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. + * Later, transactions may be queried using these events. + */ + +export interface Event { + type: string; + attributes: EventAttribute[]; +} +/** + * Event allows application developers to attach additional information to + * ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. + * Later, transactions may be queried using these events. + */ + +export interface EventSDKType { + type: string; + attributes: EventAttributeSDKType[]; +} +/** EventAttribute is a single key-value pair, associated with an event. */ + +export interface EventAttribute { + key: Uint8Array; + value: Uint8Array; + /** nondeterministic */ + + index: boolean; +} +/** EventAttribute is a single key-value pair, associated with an event. */ + +export interface EventAttributeSDKType { + key: Uint8Array; + value: Uint8Array; + index: boolean; +} +/** + * TxResult contains results of executing the transaction. + * + * One usage is indexing transaction results. + */ + +export interface TxResult { + height: Long; + index: number; + tx: Uint8Array; + result?: ResponseDeliverTx; +} +/** + * TxResult contains results of executing the transaction. + * + * One usage is indexing transaction results. + */ + +export interface TxResultSDKType { + height: Long; + index: number; + tx: Uint8Array; + result?: ResponseDeliverTxSDKType; +} +/** Validator */ + +export interface Validator { + /** + * The first 20 bytes of SHA256(public key) + * PubKey pub_key = 2 [(gogoproto.nullable)=false]; + */ + address: Uint8Array; + /** The voting power */ + + power: Long; +} +/** Validator */ + +export interface ValidatorSDKType { + address: Uint8Array; + power: Long; +} +/** ValidatorUpdate */ + +export interface ValidatorUpdate { + pubKey?: PublicKey; + power: Long; +} +/** ValidatorUpdate */ + +export interface ValidatorUpdateSDKType { + pub_key?: PublicKeySDKType; + power: Long; +} +/** VoteInfo */ + +export interface VoteInfo { + validator?: Validator; + signedLastBlock: boolean; +} +/** VoteInfo */ + +export interface VoteInfoSDKType { + validator?: ValidatorSDKType; + signed_last_block: boolean; +} +export interface Evidence { + type: EvidenceType; + /** The offending validator */ + + validator?: Validator; + /** The height when the offense occurred */ + + height: Long; + /** The corresponding time where the offense occurred */ + + time?: Date; + /** + * Total voting power of the validator set in case the ABCI application does + * not store historical validators. 
+ * https://github.com/tendermint/tendermint/issues/4581 + */ + + totalVotingPower: Long; +} +export interface EvidenceSDKType { + type: EvidenceType; + validator?: ValidatorSDKType; + height: Long; + time?: Date; + total_voting_power: Long; +} +export interface Snapshot { + /** The height at which the snapshot was taken */ + height: Long; + /** The application-specific snapshot format */ + + format: number; + /** Number of chunks in the snapshot */ + + chunks: number; + /** Arbitrary snapshot hash, equal only if identical */ + + hash: Uint8Array; + /** Arbitrary application metadata */ + + metadata: Uint8Array; +} +export interface SnapshotSDKType { + height: Long; + format: number; + chunks: number; + hash: Uint8Array; + metadata: Uint8Array; +} + +function createBaseRequest(): Request { + return { + echo: undefined, + flush: undefined, + info: undefined, + setOption: undefined, + initChain: undefined, + query: undefined, + beginBlock: undefined, + checkTx: undefined, + deliverTx: undefined, + endBlock: undefined, + commit: undefined, + listSnapshots: undefined, + offerSnapshot: undefined, + loadSnapshotChunk: undefined, + applySnapshotChunk: undefined + }; +} + +export const Request = { + encode(message: Request, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.echo !== undefined) { + RequestEcho.encode(message.echo, writer.uint32(10).fork()).ldelim(); + } + + if (message.flush !== undefined) { + RequestFlush.encode(message.flush, writer.uint32(18).fork()).ldelim(); + } + + if (message.info !== undefined) { + RequestInfo.encode(message.info, writer.uint32(26).fork()).ldelim(); + } + + if (message.setOption !== undefined) { + RequestSetOption.encode(message.setOption, writer.uint32(34).fork()).ldelim(); + } + + if (message.initChain !== undefined) { + RequestInitChain.encode(message.initChain, writer.uint32(42).fork()).ldelim(); + } + + if (message.query !== undefined) { + RequestQuery.encode(message.query, writer.uint32(50).fork()).ldelim(); + } + + if (message.beginBlock !== undefined) { + RequestBeginBlock.encode(message.beginBlock, writer.uint32(58).fork()).ldelim(); + } + + if (message.checkTx !== undefined) { + RequestCheckTx.encode(message.checkTx, writer.uint32(66).fork()).ldelim(); + } + + if (message.deliverTx !== undefined) { + RequestDeliverTx.encode(message.deliverTx, writer.uint32(74).fork()).ldelim(); + } + + if (message.endBlock !== undefined) { + RequestEndBlock.encode(message.endBlock, writer.uint32(82).fork()).ldelim(); + } + + if (message.commit !== undefined) { + RequestCommit.encode(message.commit, writer.uint32(90).fork()).ldelim(); + } + + if (message.listSnapshots !== undefined) { + RequestListSnapshots.encode(message.listSnapshots, writer.uint32(98).fork()).ldelim(); + } + + if (message.offerSnapshot !== undefined) { + RequestOfferSnapshot.encode(message.offerSnapshot, writer.uint32(106).fork()).ldelim(); + } + + if (message.loadSnapshotChunk !== undefined) { + RequestLoadSnapshotChunk.encode(message.loadSnapshotChunk, writer.uint32(114).fork()).ldelim(); + } + + if (message.applySnapshotChunk !== undefined) { + RequestApplySnapshotChunk.encode(message.applySnapshotChunk, writer.uint32(122).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Request { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequest(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.echo = RequestEcho.decode(reader, reader.uint32()); + break; + + case 2: + message.flush = RequestFlush.decode(reader, reader.uint32()); + break; + + case 3: + message.info = RequestInfo.decode(reader, reader.uint32()); + break; + + case 4: + message.setOption = RequestSetOption.decode(reader, reader.uint32()); + break; + + case 5: + message.initChain = RequestInitChain.decode(reader, reader.uint32()); + break; + + case 6: + message.query = RequestQuery.decode(reader, reader.uint32()); + break; + + case 7: + message.beginBlock = RequestBeginBlock.decode(reader, reader.uint32()); + break; + + case 8: + message.checkTx = RequestCheckTx.decode(reader, reader.uint32()); + break; + + case 9: + message.deliverTx = RequestDeliverTx.decode(reader, reader.uint32()); + break; + + case 10: + message.endBlock = RequestEndBlock.decode(reader, reader.uint32()); + break; + + case 11: + message.commit = RequestCommit.decode(reader, reader.uint32()); + break; + + case 12: + message.listSnapshots = RequestListSnapshots.decode(reader, reader.uint32()); + break; + + case 13: + message.offerSnapshot = RequestOfferSnapshot.decode(reader, reader.uint32()); + break; + + case 14: + message.loadSnapshotChunk = RequestLoadSnapshotChunk.decode(reader, reader.uint32()); + break; + + case 15: + message.applySnapshotChunk = RequestApplySnapshotChunk.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Request { + const message = createBaseRequest(); + message.echo = object.echo !== undefined && object.echo !== null ? RequestEcho.fromPartial(object.echo) : undefined; + message.flush = object.flush !== undefined && object.flush !== null ? RequestFlush.fromPartial(object.flush) : undefined; + message.info = object.info !== undefined && object.info !== null ? RequestInfo.fromPartial(object.info) : undefined; + message.setOption = object.setOption !== undefined && object.setOption !== null ? RequestSetOption.fromPartial(object.setOption) : undefined; + message.initChain = object.initChain !== undefined && object.initChain !== null ? RequestInitChain.fromPartial(object.initChain) : undefined; + message.query = object.query !== undefined && object.query !== null ? RequestQuery.fromPartial(object.query) : undefined; + message.beginBlock = object.beginBlock !== undefined && object.beginBlock !== null ? RequestBeginBlock.fromPartial(object.beginBlock) : undefined; + message.checkTx = object.checkTx !== undefined && object.checkTx !== null ? RequestCheckTx.fromPartial(object.checkTx) : undefined; + message.deliverTx = object.deliverTx !== undefined && object.deliverTx !== null ? RequestDeliverTx.fromPartial(object.deliverTx) : undefined; + message.endBlock = object.endBlock !== undefined && object.endBlock !== null ? RequestEndBlock.fromPartial(object.endBlock) : undefined; + message.commit = object.commit !== undefined && object.commit !== null ? RequestCommit.fromPartial(object.commit) : undefined; + message.listSnapshots = object.listSnapshots !== undefined && object.listSnapshots !== null ? RequestListSnapshots.fromPartial(object.listSnapshots) : undefined; + message.offerSnapshot = object.offerSnapshot !== undefined && object.offerSnapshot !== null ? 
RequestOfferSnapshot.fromPartial(object.offerSnapshot) : undefined; + message.loadSnapshotChunk = object.loadSnapshotChunk !== undefined && object.loadSnapshotChunk !== null ? RequestLoadSnapshotChunk.fromPartial(object.loadSnapshotChunk) : undefined; + message.applySnapshotChunk = object.applySnapshotChunk !== undefined && object.applySnapshotChunk !== null ? RequestApplySnapshotChunk.fromPartial(object.applySnapshotChunk) : undefined; + return message; + } + +}; + +function createBaseRequestEcho(): RequestEcho { + return { + message: "" + }; +} + +export const RequestEcho = { + encode(message: RequestEcho, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.message !== "") { + writer.uint32(10).string(message.message); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEcho { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestEcho(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.message = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestEcho { + const message = createBaseRequestEcho(); + message.message = object.message ?? ""; + return message; + } + +}; + +function createBaseRequestFlush(): RequestFlush { + return {}; +} + +export const RequestFlush = { + encode(_: RequestFlush, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestFlush { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestFlush(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): RequestFlush { + const message = createBaseRequestFlush(); + return message; + } + +}; + +function createBaseRequestInfo(): RequestInfo { + return { + version: "", + blockVersion: Long.UZERO, + p2pVersion: Long.UZERO + }; +} + +export const RequestInfo = { + encode(message: RequestInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + + if (!message.blockVersion.isZero()) { + writer.uint32(16).uint64(message.blockVersion); + } + + if (!message.p2pVersion.isZero()) { + writer.uint32(24).uint64(message.p2pVersion); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.version = reader.string(); + break; + + case 2: + message.blockVersion = (reader.uint64() as Long); + break; + + case 3: + message.p2pVersion = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestInfo { + const message = createBaseRequestInfo(); + message.version = object.version ?? 
""; + message.blockVersion = object.blockVersion !== undefined && object.blockVersion !== null ? Long.fromValue(object.blockVersion) : Long.UZERO; + message.p2pVersion = object.p2pVersion !== undefined && object.p2pVersion !== null ? Long.fromValue(object.p2pVersion) : Long.UZERO; + return message; + } + +}; + +function createBaseRequestSetOption(): RequestSetOption { + return { + key: "", + value: "" + }; +} + +export const RequestSetOption = { + encode(message: RequestSetOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestSetOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestSetOption(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + + case 2: + message.value = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestSetOption { + const message = createBaseRequestSetOption(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + } + +}; + +function createBaseRequestInitChain(): RequestInitChain { + return { + time: undefined, + chainId: "", + consensusParams: undefined, + validators: [], + appStateBytes: new Uint8Array(), + initialHeight: Long.ZERO + }; +} + +export const RequestInitChain = { + encode(message: RequestInitChain, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(10).fork()).ldelim(); + } + + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + + if (message.consensusParams !== undefined) { + ConsensusParams.encode(message.consensusParams, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.validators) { + ValidatorUpdate.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + if (message.appStateBytes.length !== 0) { + writer.uint32(42).bytes(message.appStateBytes); + } + + if (!message.initialHeight.isZero()) { + writer.uint32(48).int64(message.initialHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInitChain { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestInitChain(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 2: + message.chainId = reader.string(); + break; + + case 3: + message.consensusParams = ConsensusParams.decode(reader, reader.uint32()); + break; + + case 4: + message.validators.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + + case 5: + message.appStateBytes = reader.bytes(); + break; + + case 6: + message.initialHeight = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestInitChain { + const message = createBaseRequestInitChain(); + message.time = object.time ?? undefined; + message.chainId = object.chainId ?? ""; + message.consensusParams = object.consensusParams !== undefined && object.consensusParams !== null ? ConsensusParams.fromPartial(object.consensusParams) : undefined; + message.validators = object.validators?.map(e => ValidatorUpdate.fromPartial(e)) || []; + message.appStateBytes = object.appStateBytes ?? new Uint8Array(); + message.initialHeight = object.initialHeight !== undefined && object.initialHeight !== null ? Long.fromValue(object.initialHeight) : Long.ZERO; + return message; + } + +}; + +function createBaseRequestQuery(): RequestQuery { + return { + data: new Uint8Array(), + path: "", + height: Long.ZERO, + prove: false + }; +} + +export const RequestQuery = { + encode(message: RequestQuery, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + + if (!message.height.isZero()) { + writer.uint32(24).int64(message.height); + } + + if (message.prove === true) { + writer.uint32(32).bool(message.prove); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestQuery { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestQuery(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + + case 2: + message.path = reader.string(); + break; + + case 3: + message.height = (reader.int64() as Long); + break; + + case 4: + message.prove = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestQuery { + const message = createBaseRequestQuery(); + message.data = object.data ?? new Uint8Array(); + message.path = object.path ?? ""; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.prove = object.prove ?? 
false; + return message; + } + +}; + +function createBaseRequestBeginBlock(): RequestBeginBlock { + return { + hash: new Uint8Array(), + header: undefined, + lastCommitInfo: undefined, + byzantineValidators: [] + }; +} + +export const RequestBeginBlock = { + encode(message: RequestBeginBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(18).fork()).ldelim(); + } + + if (message.lastCommitInfo !== undefined) { + LastCommitInfo.encode(message.lastCommitInfo, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.byzantineValidators) { + Evidence.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestBeginBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestBeginBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + + case 2: + message.header = Header.decode(reader, reader.uint32()); + break; + + case 3: + message.lastCommitInfo = LastCommitInfo.decode(reader, reader.uint32()); + break; + + case 4: + message.byzantineValidators.push(Evidence.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestBeginBlock { + const message = createBaseRequestBeginBlock(); + message.hash = object.hash ?? new Uint8Array(); + message.header = object.header !== undefined && object.header !== null ? Header.fromPartial(object.header) : undefined; + message.lastCommitInfo = object.lastCommitInfo !== undefined && object.lastCommitInfo !== null ? LastCommitInfo.fromPartial(object.lastCommitInfo) : undefined; + message.byzantineValidators = object.byzantineValidators?.map(e => Evidence.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseRequestCheckTx(): RequestCheckTx { + return { + tx: new Uint8Array(), + type: 0 + }; +} + +export const RequestCheckTx = { + encode(message: RequestCheckTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx.length !== 0) { + writer.uint32(10).bytes(message.tx); + } + + if (message.type !== 0) { + writer.uint32(16).int32(message.type); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCheckTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestCheckTx(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tx = reader.bytes(); + break; + + case 2: + message.type = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestCheckTx { + const message = createBaseRequestCheckTx(); + message.tx = object.tx ?? new Uint8Array(); + message.type = object.type ?? 
0; + return message; + } + +}; + +function createBaseRequestDeliverTx(): RequestDeliverTx { + return { + tx: new Uint8Array() + }; +} + +export const RequestDeliverTx = { + encode(message: RequestDeliverTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx.length !== 0) { + writer.uint32(10).bytes(message.tx); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestDeliverTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestDeliverTx(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.tx = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestDeliverTx { + const message = createBaseRequestDeliverTx(); + message.tx = object.tx ?? new Uint8Array(); + return message; + } + +}; + +function createBaseRequestEndBlock(): RequestEndBlock { + return { + height: Long.ZERO + }; +} + +export const RequestEndBlock = { + encode(message: RequestEndBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEndBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestEndBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestEndBlock { + const message = createBaseRequestEndBlock(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + return message; + } + +}; + +function createBaseRequestCommit(): RequestCommit { + return {}; +} + +export const RequestCommit = { + encode(_: RequestCommit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCommit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestCommit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): RequestCommit { + const message = createBaseRequestCommit(); + return message; + } + +}; + +function createBaseRequestListSnapshots(): RequestListSnapshots { + return {}; +} + +export const RequestListSnapshots = { + encode(_: RequestListSnapshots, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestListSnapshots { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestListSnapshots(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): RequestListSnapshots { + const message = createBaseRequestListSnapshots(); + return message; + } + +}; + +function createBaseRequestOfferSnapshot(): RequestOfferSnapshot { + return { + snapshot: undefined, + appHash: new Uint8Array() + }; +} + +export const RequestOfferSnapshot = { + encode(message: RequestOfferSnapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.snapshot !== undefined) { + Snapshot.encode(message.snapshot, writer.uint32(10).fork()).ldelim(); + } + + if (message.appHash.length !== 0) { + writer.uint32(18).bytes(message.appHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestOfferSnapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestOfferSnapshot(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.snapshot = Snapshot.decode(reader, reader.uint32()); + break; + + case 2: + message.appHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestOfferSnapshot { + const message = createBaseRequestOfferSnapshot(); + message.snapshot = object.snapshot !== undefined && object.snapshot !== null ? Snapshot.fromPartial(object.snapshot) : undefined; + message.appHash = object.appHash ?? new Uint8Array(); + return message; + } + +}; + +function createBaseRequestLoadSnapshotChunk(): RequestLoadSnapshotChunk { + return { + height: Long.UZERO, + format: 0, + chunk: 0 + }; +} + +export const RequestLoadSnapshotChunk = { + encode(message: RequestLoadSnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).uint64(message.height); + } + + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + + if (message.chunk !== 0) { + writer.uint32(24).uint32(message.chunk); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestLoadSnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestLoadSnapshotChunk(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.uint64() as Long); + break; + + case 2: + message.format = reader.uint32(); + break; + + case 3: + message.chunk = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestLoadSnapshotChunk { + const message = createBaseRequestLoadSnapshotChunk(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.UZERO; + message.format = object.format ?? 0; + message.chunk = object.chunk ?? 
0; + return message; + } + +}; + +function createBaseRequestApplySnapshotChunk(): RequestApplySnapshotChunk { + return { + index: 0, + chunk: new Uint8Array(), + sender: "" + }; +} + +export const RequestApplySnapshotChunk = { + encode(message: RequestApplySnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + + if (message.chunk.length !== 0) { + writer.uint32(18).bytes(message.chunk); + } + + if (message.sender !== "") { + writer.uint32(26).string(message.sender); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestApplySnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestApplySnapshotChunk(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = reader.uint32(); + break; + + case 2: + message.chunk = reader.bytes(); + break; + + case 3: + message.sender = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): RequestApplySnapshotChunk { + const message = createBaseRequestApplySnapshotChunk(); + message.index = object.index ?? 0; + message.chunk = object.chunk ?? new Uint8Array(); + message.sender = object.sender ?? ""; + return message; + } + +}; + +function createBaseResponse(): Response { + return { + exception: undefined, + echo: undefined, + flush: undefined, + info: undefined, + setOption: undefined, + initChain: undefined, + query: undefined, + beginBlock: undefined, + checkTx: undefined, + deliverTx: undefined, + endBlock: undefined, + commit: undefined, + listSnapshots: undefined, + offerSnapshot: undefined, + loadSnapshotChunk: undefined, + applySnapshotChunk: undefined + }; +} + +export const Response = { + encode(message: Response, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.exception !== undefined) { + ResponseException.encode(message.exception, writer.uint32(10).fork()).ldelim(); + } + + if (message.echo !== undefined) { + ResponseEcho.encode(message.echo, writer.uint32(18).fork()).ldelim(); + } + + if (message.flush !== undefined) { + ResponseFlush.encode(message.flush, writer.uint32(26).fork()).ldelim(); + } + + if (message.info !== undefined) { + ResponseInfo.encode(message.info, writer.uint32(34).fork()).ldelim(); + } + + if (message.setOption !== undefined) { + ResponseSetOption.encode(message.setOption, writer.uint32(42).fork()).ldelim(); + } + + if (message.initChain !== undefined) { + ResponseInitChain.encode(message.initChain, writer.uint32(50).fork()).ldelim(); + } + + if (message.query !== undefined) { + ResponseQuery.encode(message.query, writer.uint32(58).fork()).ldelim(); + } + + if (message.beginBlock !== undefined) { + ResponseBeginBlock.encode(message.beginBlock, writer.uint32(66).fork()).ldelim(); + } + + if (message.checkTx !== undefined) { + ResponseCheckTx.encode(message.checkTx, writer.uint32(74).fork()).ldelim(); + } + + if (message.deliverTx !== undefined) { + ResponseDeliverTx.encode(message.deliverTx, writer.uint32(82).fork()).ldelim(); + } + + if (message.endBlock !== undefined) { + ResponseEndBlock.encode(message.endBlock, writer.uint32(90).fork()).ldelim(); + } + + if (message.commit !== undefined) { + ResponseCommit.encode(message.commit, writer.uint32(98).fork()).ldelim(); + } + + if 
(message.listSnapshots !== undefined) { + ResponseListSnapshots.encode(message.listSnapshots, writer.uint32(106).fork()).ldelim(); + } + + if (message.offerSnapshot !== undefined) { + ResponseOfferSnapshot.encode(message.offerSnapshot, writer.uint32(114).fork()).ldelim(); + } + + if (message.loadSnapshotChunk !== undefined) { + ResponseLoadSnapshotChunk.encode(message.loadSnapshotChunk, writer.uint32(122).fork()).ldelim(); + } + + if (message.applySnapshotChunk !== undefined) { + ResponseApplySnapshotChunk.encode(message.applySnapshotChunk, writer.uint32(130).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Response { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponse(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.exception = ResponseException.decode(reader, reader.uint32()); + break; + + case 2: + message.echo = ResponseEcho.decode(reader, reader.uint32()); + break; + + case 3: + message.flush = ResponseFlush.decode(reader, reader.uint32()); + break; + + case 4: + message.info = ResponseInfo.decode(reader, reader.uint32()); + break; + + case 5: + message.setOption = ResponseSetOption.decode(reader, reader.uint32()); + break; + + case 6: + message.initChain = ResponseInitChain.decode(reader, reader.uint32()); + break; + + case 7: + message.query = ResponseQuery.decode(reader, reader.uint32()); + break; + + case 8: + message.beginBlock = ResponseBeginBlock.decode(reader, reader.uint32()); + break; + + case 9: + message.checkTx = ResponseCheckTx.decode(reader, reader.uint32()); + break; + + case 10: + message.deliverTx = ResponseDeliverTx.decode(reader, reader.uint32()); + break; + + case 11: + message.endBlock = ResponseEndBlock.decode(reader, reader.uint32()); + break; + + case 12: + message.commit = ResponseCommit.decode(reader, reader.uint32()); + break; + + case 13: + message.listSnapshots = ResponseListSnapshots.decode(reader, reader.uint32()); + break; + + case 14: + message.offerSnapshot = ResponseOfferSnapshot.decode(reader, reader.uint32()); + break; + + case 15: + message.loadSnapshotChunk = ResponseLoadSnapshotChunk.decode(reader, reader.uint32()); + break; + + case 16: + message.applySnapshotChunk = ResponseApplySnapshotChunk.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Response { + const message = createBaseResponse(); + message.exception = object.exception !== undefined && object.exception !== null ? ResponseException.fromPartial(object.exception) : undefined; + message.echo = object.echo !== undefined && object.echo !== null ? ResponseEcho.fromPartial(object.echo) : undefined; + message.flush = object.flush !== undefined && object.flush !== null ? ResponseFlush.fromPartial(object.flush) : undefined; + message.info = object.info !== undefined && object.info !== null ? ResponseInfo.fromPartial(object.info) : undefined; + message.setOption = object.setOption !== undefined && object.setOption !== null ? ResponseSetOption.fromPartial(object.setOption) : undefined; + message.initChain = object.initChain !== undefined && object.initChain !== null ? ResponseInitChain.fromPartial(object.initChain) : undefined; + message.query = object.query !== undefined && object.query !== null ? 
ResponseQuery.fromPartial(object.query) : undefined; + message.beginBlock = object.beginBlock !== undefined && object.beginBlock !== null ? ResponseBeginBlock.fromPartial(object.beginBlock) : undefined; + message.checkTx = object.checkTx !== undefined && object.checkTx !== null ? ResponseCheckTx.fromPartial(object.checkTx) : undefined; + message.deliverTx = object.deliverTx !== undefined && object.deliverTx !== null ? ResponseDeliverTx.fromPartial(object.deliverTx) : undefined; + message.endBlock = object.endBlock !== undefined && object.endBlock !== null ? ResponseEndBlock.fromPartial(object.endBlock) : undefined; + message.commit = object.commit !== undefined && object.commit !== null ? ResponseCommit.fromPartial(object.commit) : undefined; + message.listSnapshots = object.listSnapshots !== undefined && object.listSnapshots !== null ? ResponseListSnapshots.fromPartial(object.listSnapshots) : undefined; + message.offerSnapshot = object.offerSnapshot !== undefined && object.offerSnapshot !== null ? ResponseOfferSnapshot.fromPartial(object.offerSnapshot) : undefined; + message.loadSnapshotChunk = object.loadSnapshotChunk !== undefined && object.loadSnapshotChunk !== null ? ResponseLoadSnapshotChunk.fromPartial(object.loadSnapshotChunk) : undefined; + message.applySnapshotChunk = object.applySnapshotChunk !== undefined && object.applySnapshotChunk !== null ? ResponseApplySnapshotChunk.fromPartial(object.applySnapshotChunk) : undefined; + return message; + } + +}; + +function createBaseResponseException(): ResponseException { + return { + error: "" + }; +} + +export const ResponseException = { + encode(message: ResponseException, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.error !== "") { + writer.uint32(10).string(message.error); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseException { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseException(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.error = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseException { + const message = createBaseResponseException(); + message.error = object.error ?? ""; + return message; + } + +}; + +function createBaseResponseEcho(): ResponseEcho { + return { + message: "" + }; +} + +export const ResponseEcho = { + encode(message: ResponseEcho, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.message !== "") { + writer.uint32(10).string(message.message); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEcho { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseEcho(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.message = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseEcho { + const message = createBaseResponseEcho(); + message.message = object.message ?? 
""; + return message; + } + +}; + +function createBaseResponseFlush(): ResponseFlush { + return {}; +} + +export const ResponseFlush = { + encode(_: ResponseFlush, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseFlush { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseFlush(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(_: DeepPartial): ResponseFlush { + const message = createBaseResponseFlush(); + return message; + } + +}; + +function createBaseResponseInfo(): ResponseInfo { + return { + data: "", + version: "", + appVersion: Long.UZERO, + lastBlockHeight: Long.ZERO, + lastBlockAppHash: new Uint8Array() + }; +} + +export const ResponseInfo = { + encode(message: ResponseInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data !== "") { + writer.uint32(10).string(message.data); + } + + if (message.version !== "") { + writer.uint32(18).string(message.version); + } + + if (!message.appVersion.isZero()) { + writer.uint32(24).uint64(message.appVersion); + } + + if (!message.lastBlockHeight.isZero()) { + writer.uint32(32).int64(message.lastBlockHeight); + } + + if (message.lastBlockAppHash.length !== 0) { + writer.uint32(42).bytes(message.lastBlockAppHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.data = reader.string(); + break; + + case 2: + message.version = reader.string(); + break; + + case 3: + message.appVersion = (reader.uint64() as Long); + break; + + case 4: + message.lastBlockHeight = (reader.int64() as Long); + break; + + case 5: + message.lastBlockAppHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseInfo { + const message = createBaseResponseInfo(); + message.data = object.data ?? ""; + message.version = object.version ?? ""; + message.appVersion = object.appVersion !== undefined && object.appVersion !== null ? Long.fromValue(object.appVersion) : Long.UZERO; + message.lastBlockHeight = object.lastBlockHeight !== undefined && object.lastBlockHeight !== null ? Long.fromValue(object.lastBlockHeight) : Long.ZERO; + message.lastBlockAppHash = object.lastBlockAppHash ?? new Uint8Array(); + return message; + } + +}; + +function createBaseResponseSetOption(): ResponseSetOption { + return { + code: 0, + log: "", + info: "" + }; +} + +export const ResponseSetOption = { + encode(message: ResponseSetOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseSetOption { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseSetOption(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + + case 3: + message.log = reader.string(); + break; + + case 4: + message.info = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseSetOption { + const message = createBaseResponseSetOption(); + message.code = object.code ?? 0; + message.log = object.log ?? ""; + message.info = object.info ?? ""; + return message; + } + +}; + +function createBaseResponseInitChain(): ResponseInitChain { + return { + consensusParams: undefined, + validators: [], + appHash: new Uint8Array() + }; +} + +export const ResponseInitChain = { + encode(message: ResponseInitChain, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consensusParams !== undefined) { + ConsensusParams.encode(message.consensusParams, writer.uint32(10).fork()).ldelim(); + } + + for (const v of message.validators) { + ValidatorUpdate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.appHash.length !== 0) { + writer.uint32(26).bytes(message.appHash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInitChain { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseInitChain(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.consensusParams = ConsensusParams.decode(reader, reader.uint32()); + break; + + case 2: + message.validators.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + + case 3: + message.appHash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseInitChain { + const message = createBaseResponseInitChain(); + message.consensusParams = object.consensusParams !== undefined && object.consensusParams !== null ? ConsensusParams.fromPartial(object.consensusParams) : undefined; + message.validators = object.validators?.map(e => ValidatorUpdate.fromPartial(e)) || []; + message.appHash = object.appHash ?? 
new Uint8Array(); + return message; + } + +}; + +function createBaseResponseQuery(): ResponseQuery { + return { + code: 0, + log: "", + info: "", + index: Long.ZERO, + key: new Uint8Array(), + value: new Uint8Array(), + proofOps: undefined, + height: Long.ZERO, + codespace: "" + }; +} + +export const ResponseQuery = { + encode(message: ResponseQuery, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + + if (!message.index.isZero()) { + writer.uint32(40).int64(message.index); + } + + if (message.key.length !== 0) { + writer.uint32(50).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(58).bytes(message.value); + } + + if (message.proofOps !== undefined) { + ProofOps.encode(message.proofOps, writer.uint32(66).fork()).ldelim(); + } + + if (!message.height.isZero()) { + writer.uint32(72).int64(message.height); + } + + if (message.codespace !== "") { + writer.uint32(82).string(message.codespace); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseQuery { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseQuery(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + + case 3: + message.log = reader.string(); + break; + + case 4: + message.info = reader.string(); + break; + + case 5: + message.index = (reader.int64() as Long); + break; + + case 6: + message.key = reader.bytes(); + break; + + case 7: + message.value = reader.bytes(); + break; + + case 8: + message.proofOps = ProofOps.decode(reader, reader.uint32()); + break; + + case 9: + message.height = (reader.int64() as Long); + break; + + case 10: + message.codespace = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseQuery { + const message = createBaseResponseQuery(); + message.code = object.code ?? 0; + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.index = object.index !== undefined && object.index !== null ? Long.fromValue(object.index) : Long.ZERO; + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.proofOps = object.proofOps !== undefined && object.proofOps !== null ? ProofOps.fromPartial(object.proofOps) : undefined; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.codespace = object.codespace ?? ""; + return message; + } + +}; + +function createBaseResponseBeginBlock(): ResponseBeginBlock { + return { + events: [] + }; +} + +export const ResponseBeginBlock = { + encode(message: ResponseBeginBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.events) { + Event.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseBeginBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseBeginBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseBeginBlock { + const message = createBaseResponseBeginBlock(); + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseResponseCheckTx(): ResponseCheckTx { + return { + code: 0, + data: new Uint8Array(), + log: "", + info: "", + gasWanted: Long.ZERO, + gasUsed: Long.ZERO, + events: [], + codespace: "" + }; +} + +export const ResponseCheckTx = { + encode(message: ResponseCheckTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + + if (!message.gasWanted.isZero()) { + writer.uint32(40).int64(message.gasWanted); + } + + if (!message.gasUsed.isZero()) { + writer.uint32(48).int64(message.gasUsed); + } + + for (const v of message.events) { + Event.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.codespace !== "") { + writer.uint32(66).string(message.codespace); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCheckTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseCheckTx(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + + case 2: + message.data = reader.bytes(); + break; + + case 3: + message.log = reader.string(); + break; + + case 4: + message.info = reader.string(); + break; + + case 5: + message.gasWanted = (reader.int64() as Long); + break; + + case 6: + message.gasUsed = (reader.int64() as Long); + break; + + case 7: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + case 8: + message.codespace = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseCheckTx { + const message = createBaseResponseCheckTx(); + message.code = object.code ?? 0; + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted !== undefined && object.gasWanted !== null ? Long.fromValue(object.gasWanted) : Long.ZERO; + message.gasUsed = object.gasUsed !== undefined && object.gasUsed !== null ? Long.fromValue(object.gasUsed) : Long.ZERO; + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + message.codespace = object.codespace ?? 
""; + return message; + } + +}; + +function createBaseResponseDeliverTx(): ResponseDeliverTx { + return { + code: 0, + data: new Uint8Array(), + log: "", + info: "", + gasWanted: Long.ZERO, + gasUsed: Long.ZERO, + events: [], + codespace: "" + }; +} + +export const ResponseDeliverTx = { + encode(message: ResponseDeliverTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + + if (!message.gasWanted.isZero()) { + writer.uint32(40).int64(message.gasWanted); + } + + if (!message.gasUsed.isZero()) { + writer.uint32(48).int64(message.gasUsed); + } + + for (const v of message.events) { + Event.encode(v!, writer.uint32(58).fork()).ldelim(); + } + + if (message.codespace !== "") { + writer.uint32(66).string(message.codespace); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseDeliverTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseDeliverTx(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + + case 2: + message.data = reader.bytes(); + break; + + case 3: + message.log = reader.string(); + break; + + case 4: + message.info = reader.string(); + break; + + case 5: + message.gasWanted = (reader.int64() as Long); + break; + + case 6: + message.gasUsed = (reader.int64() as Long); + break; + + case 7: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + case 8: + message.codespace = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseDeliverTx { + const message = createBaseResponseDeliverTx(); + message.code = object.code ?? 0; + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted !== undefined && object.gasWanted !== null ? Long.fromValue(object.gasWanted) : Long.ZERO; + message.gasUsed = object.gasUsed !== undefined && object.gasUsed !== null ? Long.fromValue(object.gasUsed) : Long.ZERO; + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + message.codespace = object.codespace ?? ""; + return message; + } + +}; + +function createBaseResponseEndBlock(): ResponseEndBlock { + return { + validatorUpdates: [], + consensusParamUpdates: undefined, + events: [] + }; +} + +export const ResponseEndBlock = { + encode(message: ResponseEndBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validatorUpdates) { + ValidatorUpdate.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.consensusParamUpdates !== undefined) { + ConsensusParams.encode(message.consensusParamUpdates, writer.uint32(18).fork()).ldelim(); + } + + for (const v of message.events) { + Event.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEndBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseEndBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validatorUpdates.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + + case 2: + message.consensusParamUpdates = ConsensusParams.decode(reader, reader.uint32()); + break; + + case 3: + message.events.push(Event.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseEndBlock { + const message = createBaseResponseEndBlock(); + message.validatorUpdates = object.validatorUpdates?.map(e => ValidatorUpdate.fromPartial(e)) || []; + message.consensusParamUpdates = object.consensusParamUpdates !== undefined && object.consensusParamUpdates !== null ? ConsensusParams.fromPartial(object.consensusParamUpdates) : undefined; + message.events = object.events?.map(e => Event.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseResponseCommit(): ResponseCommit { + return { + data: new Uint8Array(), + retainHeight: Long.ZERO + }; +} + +export const ResponseCommit = { + encode(message: ResponseCommit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + if (!message.retainHeight.isZero()) { + writer.uint32(24).int64(message.retainHeight); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCommit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseCommit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 2: + message.data = reader.bytes(); + break; + + case 3: + message.retainHeight = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseCommit { + const message = createBaseResponseCommit(); + message.data = object.data ?? new Uint8Array(); + message.retainHeight = object.retainHeight !== undefined && object.retainHeight !== null ? Long.fromValue(object.retainHeight) : Long.ZERO; + return message; + } + +}; + +function createBaseResponseListSnapshots(): ResponseListSnapshots { + return { + snapshots: [] + }; +} + +export const ResponseListSnapshots = { + encode(message: ResponseListSnapshots, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.snapshots) { + Snapshot.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseListSnapshots { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseListSnapshots(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.snapshots.push(Snapshot.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseListSnapshots { + const message = createBaseResponseListSnapshots(); + message.snapshots = object.snapshots?.map(e => Snapshot.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseResponseOfferSnapshot(): ResponseOfferSnapshot { + return { + result: 0 + }; +} + +export const ResponseOfferSnapshot = { + encode(message: ResponseOfferSnapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.result !== 0) { + writer.uint32(8).int32(message.result); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseOfferSnapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseOfferSnapshot(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.result = (reader.int32() as any); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseOfferSnapshot { + const message = createBaseResponseOfferSnapshot(); + message.result = object.result ?? 0; + return message; + } + +}; + +function createBaseResponseLoadSnapshotChunk(): ResponseLoadSnapshotChunk { + return { + chunk: new Uint8Array() + }; +} + +export const ResponseLoadSnapshotChunk = { + encode(message: ResponseLoadSnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.chunk.length !== 0) { + writer.uint32(10).bytes(message.chunk); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseLoadSnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseLoadSnapshotChunk(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.chunk = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseLoadSnapshotChunk { + const message = createBaseResponseLoadSnapshotChunk(); + message.chunk = object.chunk ?? new Uint8Array(); + return message; + } + +}; + +function createBaseResponseApplySnapshotChunk(): ResponseApplySnapshotChunk { + return { + result: 0, + refetchChunks: [], + rejectSenders: [] + }; +} + +export const ResponseApplySnapshotChunk = { + encode(message: ResponseApplySnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.result !== 0) { + writer.uint32(8).int32(message.result); + } + + writer.uint32(18).fork(); + + for (const v of message.refetchChunks) { + writer.uint32(v); + } + + writer.ldelim(); + + for (const v of message.rejectSenders) { + writer.uint32(26).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseApplySnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseApplySnapshotChunk(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.result = (reader.int32() as any); + break; + + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.refetchChunks.push(reader.uint32()); + } + } else { + message.refetchChunks.push(reader.uint32()); + } + + break; + + case 3: + message.rejectSenders.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ResponseApplySnapshotChunk { + const message = createBaseResponseApplySnapshotChunk(); + message.result = object.result ?? 0; + message.refetchChunks = object.refetchChunks?.map(e => e) || []; + message.rejectSenders = object.rejectSenders?.map(e => e) || []; + return message; + } + +}; + +function createBaseConsensusParams(): ConsensusParams { + return { + block: undefined, + evidence: undefined, + validator: undefined, + version: undefined + }; +} + +export const ConsensusParams = { + encode(message: ConsensusParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== undefined) { + BlockParams.encode(message.block, writer.uint32(10).fork()).ldelim(); + } + + if (message.evidence !== undefined) { + EvidenceParams.encode(message.evidence, writer.uint32(18).fork()).ldelim(); + } + + if (message.validator !== undefined) { + ValidatorParams.encode(message.validator, writer.uint32(26).fork()).ldelim(); + } + + if (message.version !== undefined) { + VersionParams.encode(message.version, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.block = BlockParams.decode(reader, reader.uint32()); + break; + + case 2: + message.evidence = EvidenceParams.decode(reader, reader.uint32()); + break; + + case 3: + message.validator = ValidatorParams.decode(reader, reader.uint32()); + break; + + case 4: + message.version = VersionParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusParams { + const message = createBaseConsensusParams(); + message.block = object.block !== undefined && object.block !== null ? BlockParams.fromPartial(object.block) : undefined; + message.evidence = object.evidence !== undefined && object.evidence !== null ? EvidenceParams.fromPartial(object.evidence) : undefined; + message.validator = object.validator !== undefined && object.validator !== null ? ValidatorParams.fromPartial(object.validator) : undefined; + message.version = object.version !== undefined && object.version !== null ? 
VersionParams.fromPartial(object.version) : undefined; + return message; + } + +}; + +function createBaseBlockParams(): BlockParams { + return { + maxBytes: Long.ZERO, + maxGas: Long.ZERO + }; +} + +export const BlockParams = { + encode(message: BlockParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.maxBytes.isZero()) { + writer.uint32(8).int64(message.maxBytes); + } + + if (!message.maxGas.isZero()) { + writer.uint32(16).int64(message.maxGas); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxBytes = (reader.int64() as Long); + break; + + case 2: + message.maxGas = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BlockParams { + const message = createBaseBlockParams(); + message.maxBytes = object.maxBytes !== undefined && object.maxBytes !== null ? Long.fromValue(object.maxBytes) : Long.ZERO; + message.maxGas = object.maxGas !== undefined && object.maxGas !== null ? Long.fromValue(object.maxGas) : Long.ZERO; + return message; + } + +}; + +function createBaseLastCommitInfo(): LastCommitInfo { + return { + round: 0, + votes: [] + }; +} + +export const LastCommitInfo = { + encode(message: LastCommitInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.round !== 0) { + writer.uint32(8).int32(message.round); + } + + for (const v of message.votes) { + VoteInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LastCommitInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseLastCommitInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.round = reader.int32(); + break; + + case 2: + message.votes.push(VoteInfo.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LastCommitInfo { + const message = createBaseLastCommitInfo(); + message.round = object.round ?? 0; + message.votes = object.votes?.map(e => VoteInfo.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseEvent(): Event { + return { + type: "", + attributes: [] + }; +} + +export const Event = { + encode(message: Event, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + + for (const v of message.attributes) { + EventAttribute.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Event { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEvent(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + + case 2: + message.attributes.push(EventAttribute.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Event { + const message = createBaseEvent(); + message.type = object.type ?? ""; + message.attributes = object.attributes?.map(e => EventAttribute.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseEventAttribute(): EventAttribute { + return { + key: new Uint8Array(), + value: new Uint8Array(), + index: false + }; +} + +export const EventAttribute = { + encode(message: EventAttribute, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + + if (message.index === true) { + writer.uint32(24).bool(message.index); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventAttribute { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventAttribute(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.value = reader.bytes(); + break; + + case 3: + message.index = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EventAttribute { + const message = createBaseEventAttribute(); + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.index = object.index ?? false; + return message; + } + +}; + +function createBaseTxResult(): TxResult { + return { + height: Long.ZERO, + index: 0, + tx: new Uint8Array(), + result: undefined + }; +} + +export const TxResult = { + encode(message: TxResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.index !== 0) { + writer.uint32(16).uint32(message.index); + } + + if (message.tx.length !== 0) { + writer.uint32(26).bytes(message.tx); + } + + if (message.result !== undefined) { + ResponseDeliverTx.encode(message.result, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxResult(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.index = reader.uint32(); + break; + + case 3: + message.tx = reader.bytes(); + break; + + case 4: + message.result = ResponseDeliverTx.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxResult { + const message = createBaseTxResult(); + message.height = object.height !== undefined && object.height !== null ? 
Long.fromValue(object.height) : Long.ZERO; + message.index = object.index ?? 0; + message.tx = object.tx ?? new Uint8Array(); + message.result = object.result !== undefined && object.result !== null ? ResponseDeliverTx.fromPartial(object.result) : undefined; + return message; + } + +}; + +function createBaseValidator(): Validator { + return { + address: new Uint8Array(), + power: Long.ZERO + }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address.length !== 0) { + writer.uint32(10).bytes(message.address); + } + + if (!message.power.isZero()) { + writer.uint32(24).int64(message.power); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.bytes(); + break; + + case 3: + message.power = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Validator { + const message = createBaseValidator(); + message.address = object.address ?? new Uint8Array(); + message.power = object.power !== undefined && object.power !== null ? Long.fromValue(object.power) : Long.ZERO; + return message; + } + +}; + +function createBaseValidatorUpdate(): ValidatorUpdate { + return { + pubKey: undefined, + power: Long.ZERO + }; +} + +export const ValidatorUpdate = { + encode(message: ValidatorUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(10).fork()).ldelim(); + } + + if (!message.power.isZero()) { + writer.uint32(16).int64(message.power); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorUpdate(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + + case 2: + message.power = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorUpdate { + const message = createBaseValidatorUpdate(); + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? PublicKey.fromPartial(object.pubKey) : undefined; + message.power = object.power !== undefined && object.power !== null ? 
Long.fromValue(object.power) : Long.ZERO; + return message; + } + +}; + +function createBaseVoteInfo(): VoteInfo { + return { + validator: undefined, + signedLastBlock: false + }; +} + +export const VoteInfo = { + encode(message: VoteInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + + if (message.signedLastBlock === true) { + writer.uint32(16).bool(message.signedLastBlock); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VoteInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVoteInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + + case 2: + message.signedLastBlock = reader.bool(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VoteInfo { + const message = createBaseVoteInfo(); + message.validator = object.validator !== undefined && object.validator !== null ? Validator.fromPartial(object.validator) : undefined; + message.signedLastBlock = object.signedLastBlock ?? false; + return message; + } + +}; + +function createBaseEvidence(): Evidence { + return { + type: 0, + validator: undefined, + height: Long.ZERO, + time: undefined, + totalVotingPower: Long.ZERO + }; +} + +export const Evidence = { + encode(message: Evidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(18).fork()).ldelim(); + } + + if (!message.height.isZero()) { + writer.uint32(24).int64(message.height); + } + + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(34).fork()).ldelim(); + } + + if (!message.totalVotingPower.isZero()) { + writer.uint32(40).int64(message.totalVotingPower); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = (reader.int32() as any); + break; + + case 2: + message.validator = Validator.decode(reader, reader.uint32()); + break; + + case 3: + message.height = (reader.int64() as Long); + break; + + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 5: + message.totalVotingPower = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Evidence { + const message = createBaseEvidence(); + message.type = object.type ?? 0; + message.validator = object.validator !== undefined && object.validator !== null ? Validator.fromPartial(object.validator) : undefined; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.time = object.time ?? 
undefined; + message.totalVotingPower = object.totalVotingPower !== undefined && object.totalVotingPower !== null ? Long.fromValue(object.totalVotingPower) : Long.ZERO; + return message; + } + +}; + +function createBaseSnapshot(): Snapshot { + return { + height: Long.UZERO, + format: 0, + chunks: 0, + hash: new Uint8Array(), + metadata: new Uint8Array() + }; +} + +export const Snapshot = { + encode(message: Snapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).uint64(message.height); + } + + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + + if (message.chunks !== 0) { + writer.uint32(24).uint32(message.chunks); + } + + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + + if (message.metadata.length !== 0) { + writer.uint32(42).bytes(message.metadata); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Snapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshot(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.uint64() as Long); + break; + + case 2: + message.format = reader.uint32(); + break; + + case 3: + message.chunks = reader.uint32(); + break; + + case 4: + message.hash = reader.bytes(); + break; + + case 5: + message.metadata = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Snapshot { + const message = createBaseSnapshot(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.UZERO; + message.format = object.format ?? 0; + message.chunks = object.chunks ?? 0; + message.hash = object.hash ?? new Uint8Array(); + message.metadata = object.metadata ?? 
new Uint8Array(); + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/bundle.ts b/packages/codegen/src/tendermint/bundle.ts new file mode 100644 index 00000000..f85a7599 --- /dev/null +++ b/packages/codegen/src/tendermint/bundle.ts @@ -0,0 +1,32 @@ +import * as _134 from "./abci/types"; +import * as _135 from "./crypto/keys"; +import * as _136 from "./crypto/proof"; +import * as _137 from "./libs/bits/types"; +import * as _138 from "./p2p/types"; +import * as _139 from "./types/block"; +import * as _140 from "./types/evidence"; +import * as _141 from "./types/params"; +import * as _142 from "./types/types"; +import * as _143 from "./types/validator"; +import * as _144 from "./version/types"; +export namespace tendermint { + export const abci = { ..._134 + }; + export const crypto = { ..._135, + ..._136 + }; + export namespace libs { + export const bits = { ..._137 + }; + } + export const p2p = { ..._138 + }; + export const types = { ..._139, + ..._140, + ..._141, + ..._142, + ..._143 + }; + export const version = { ..._144 + }; +} \ No newline at end of file diff --git a/packages/codegen/src/tendermint/crypto/keys.ts b/packages/codegen/src/tendermint/crypto/keys.ts new file mode 100644 index 00000000..21481d01 --- /dev/null +++ b/packages/codegen/src/tendermint/crypto/keys.ts @@ -0,0 +1,69 @@ +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +/** PublicKey defines the keys available for use with Tendermint Validators */ + +export interface PublicKey { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; +} +/** PublicKey defines the keys available for use with Tendermint Validators */ + +export interface PublicKeySDKType { + ed25519?: Uint8Array; + secp256k1?: Uint8Array; +} + +function createBasePublicKey(): PublicKey { + return { + ed25519: undefined, + secp256k1: undefined + }; +} + +export const PublicKey = { + encode(message: PublicKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ed25519 !== undefined) { + writer.uint32(10).bytes(message.ed25519); + } + + if (message.secp256k1 !== undefined) { + writer.uint32(18).bytes(message.secp256k1); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PublicKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublicKey(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ed25519 = reader.bytes(); + break; + + case 2: + message.secp256k1 = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PublicKey { + const message = createBasePublicKey(); + message.ed25519 = object.ed25519 ?? undefined; + message.secp256k1 = object.secp256k1 ?? 
undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/crypto/proof.ts b/packages/codegen/src/tendermint/crypto/proof.ts new file mode 100644 index 00000000..6502ae72 --- /dev/null +++ b/packages/codegen/src/tendermint/crypto/proof.ts @@ -0,0 +1,372 @@ +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface Proof { + total: Long; + index: Long; + leafHash: Uint8Array; + aunts: Uint8Array[]; +} +export interface ProofSDKType { + total: Long; + index: Long; + leaf_hash: Uint8Array; + aunts: Uint8Array[]; +} +export interface ValueOp { + /** Encoded in ProofOp.Key. */ + key: Uint8Array; + /** To encode in ProofOp.Data */ + + proof?: Proof; +} +export interface ValueOpSDKType { + key: Uint8Array; + proof?: ProofSDKType; +} +export interface DominoOp { + key: string; + input: string; + output: string; +} +export interface DominoOpSDKType { + key: string; + input: string; + output: string; +} +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ + +export interface ProofOp { + type: string; + key: Uint8Array; + data: Uint8Array; +} +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ + +export interface ProofOpSDKType { + type: string; + key: Uint8Array; + data: Uint8Array; +} +/** ProofOps is Merkle proof defined by the list of ProofOps */ + +export interface ProofOps { + ops: ProofOp[]; +} +/** ProofOps is Merkle proof defined by the list of ProofOps */ + +export interface ProofOpsSDKType { + ops: ProofOpSDKType[]; +} + +function createBaseProof(): Proof { + return { + total: Long.ZERO, + index: Long.ZERO, + leafHash: new Uint8Array(), + aunts: [] + }; +} + +export const Proof = { + encode(message: Proof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.total.isZero()) { + writer.uint32(8).int64(message.total); + } + + if (!message.index.isZero()) { + writer.uint32(16).int64(message.index); + } + + if (message.leafHash.length !== 0) { + writer.uint32(26).bytes(message.leafHash); + } + + for (const v of message.aunts) { + writer.uint32(34).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.total = (reader.int64() as Long); + break; + + case 2: + message.index = (reader.int64() as Long); + break; + + case 3: + message.leafHash = reader.bytes(); + break; + + case 4: + message.aunts.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Proof { + const message = createBaseProof(); + message.total = object.total !== undefined && object.total !== null ? Long.fromValue(object.total) : Long.ZERO; + message.index = object.index !== undefined && object.index !== null ? Long.fromValue(object.index) : Long.ZERO; + message.leafHash = object.leafHash ?? 
new Uint8Array(); + message.aunts = object.aunts?.map(e => e) || []; + return message; + } + +}; + +function createBaseValueOp(): ValueOp { + return { + key: new Uint8Array(), + proof: undefined + }; +} + +export const ValueOp = { + encode(message: ValueOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValueOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValueOp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + + case 2: + message.proof = Proof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValueOp { + const message = createBaseValueOp(); + message.key = object.key ?? new Uint8Array(); + message.proof = object.proof !== undefined && object.proof !== null ? Proof.fromPartial(object.proof) : undefined; + return message; + } + +}; + +function createBaseDominoOp(): DominoOp { + return { + key: "", + input: "", + output: "" + }; +} + +export const DominoOp = { + encode(message: DominoOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + + if (message.input !== "") { + writer.uint32(18).string(message.input); + } + + if (message.output !== "") { + writer.uint32(26).string(message.output); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DominoOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDominoOp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + + case 2: + message.input = reader.string(); + break; + + case 3: + message.output = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DominoOp { + const message = createBaseDominoOp(); + message.key = object.key ?? ""; + message.input = object.input ?? ""; + message.output = object.output ?? ""; + return message; + } + +}; + +function createBaseProofOp(): ProofOp { + return { + type: "", + key: new Uint8Array(), + data: new Uint8Array() + }; +} + +export const ProofOp = { + encode(message: ProofOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + + if (message.key.length !== 0) { + writer.uint32(18).bytes(message.key); + } + + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProofOp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + + case 2: + message.key = reader.bytes(); + break; + + case 3: + message.data = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ProofOp { + const message = createBaseProofOp(); + message.type = object.type ?? ""; + message.key = object.key ?? new Uint8Array(); + message.data = object.data ?? new Uint8Array(); + return message; + } + +}; + +function createBaseProofOps(): ProofOps { + return { + ops: [] + }; +} + +export const ProofOps = { + encode(message: ProofOps, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.ops) { + ProofOp.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOps { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOps(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.ops.push(ProofOp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ProofOps { + const message = createBaseProofOps(); + message.ops = object.ops?.map(e => ProofOp.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/libs/bits/types.ts b/packages/codegen/src/tendermint/libs/bits/types.ts new file mode 100644 index 00000000..25071f01 --- /dev/null +++ b/packages/codegen/src/tendermint/libs/bits/types.ts @@ -0,0 +1,77 @@ +import { Long, DeepPartial } from "../../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface BitArray { + bits: Long; + elems: Long[]; +} +export interface BitArraySDKType { + bits: Long; + elems: Long[]; +} + +function createBaseBitArray(): BitArray { + return { + bits: Long.ZERO, + elems: [] + }; +} + +export const BitArray = { + encode(message: BitArray, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.bits.isZero()) { + writer.uint32(8).int64(message.bits); + } + + writer.uint32(18).fork(); + + for (const v of message.elems) { + writer.uint64(v); + } + + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BitArray { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBitArray(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.bits = (reader.int64() as Long); + break; + + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + + while (reader.pos < end2) { + message.elems.push((reader.uint64() as Long)); + } + } else { + message.elems.push((reader.uint64() as Long)); + } + + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BitArray { + const message = createBaseBitArray(); + message.bits = object.bits !== undefined && object.bits !== null ? 
Long.fromValue(object.bits) : Long.ZERO; + message.elems = object.elems?.map(e => Long.fromValue(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/p2p/types.ts b/packages/codegen/src/tendermint/p2p/types.ts new file mode 100644 index 00000000..64ce38af --- /dev/null +++ b/packages/codegen/src/tendermint/p2p/types.ts @@ -0,0 +1,438 @@ +import { Timestamp } from "../../google/protobuf/timestamp"; +import { Long, DeepPartial, toTimestamp, fromTimestamp } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface ProtocolVersion { + p2p: Long; + block: Long; + app: Long; +} +export interface ProtocolVersionSDKType { + p2p: Long; + block: Long; + app: Long; +} +export interface NodeInfo { + protocolVersion?: ProtocolVersion; + nodeId: string; + listenAddr: string; + network: string; + version: string; + channels: Uint8Array; + moniker: string; + other?: NodeInfoOther; +} +export interface NodeInfoSDKType { + protocol_version?: ProtocolVersionSDKType; + node_id: string; + listen_addr: string; + network: string; + version: string; + channels: Uint8Array; + moniker: string; + other?: NodeInfoOtherSDKType; +} +export interface NodeInfoOther { + txIndex: string; + rpcAddress: string; +} +export interface NodeInfoOtherSDKType { + tx_index: string; + rpc_address: string; +} +export interface PeerInfo { + id: string; + addressInfo: PeerAddressInfo[]; + lastConnected?: Date; +} +export interface PeerInfoSDKType { + id: string; + address_info: PeerAddressInfoSDKType[]; + last_connected?: Date; +} +export interface PeerAddressInfo { + address: string; + lastDialSuccess?: Date; + lastDialFailure?: Date; + dialFailures: number; +} +export interface PeerAddressInfoSDKType { + address: string; + last_dial_success?: Date; + last_dial_failure?: Date; + dial_failures: number; +} + +function createBaseProtocolVersion(): ProtocolVersion { + return { + p2p: Long.UZERO, + block: Long.UZERO, + app: Long.UZERO + }; +} + +export const ProtocolVersion = { + encode(message: ProtocolVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.p2p.isZero()) { + writer.uint32(8).uint64(message.p2p); + } + + if (!message.block.isZero()) { + writer.uint32(16).uint64(message.block); + } + + if (!message.app.isZero()) { + writer.uint32(24).uint64(message.app); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProtocolVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProtocolVersion(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.p2p = (reader.uint64() as Long); + break; + + case 2: + message.block = (reader.uint64() as Long); + break; + + case 3: + message.app = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ProtocolVersion { + const message = createBaseProtocolVersion(); + message.p2p = object.p2p !== undefined && object.p2p !== null ? Long.fromValue(object.p2p) : Long.UZERO; + message.block = object.block !== undefined && object.block !== null ? Long.fromValue(object.block) : Long.UZERO; + message.app = object.app !== undefined && object.app !== null ? 
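BitArray.elems is a repeated uint64, so encode() writes it as a single packed, length-delimited field, while decode() accepts both the packed and unpacked wire forms, in line with proto3 rules. The 64-bit fields are Long values re-exported from the package's helpers, as the imports above show; a usage sketch (paths illustrative):

    import { BitArray } from "./tendermint/libs/bits/types";
    import { Long } from "./helpers";

    const bits = BitArray.fromPartial({
      bits: Long.fromNumber(100),
      elems: [Long.fromNumber(255, true), Long.UZERO]
    });

    // Round trip through the packed encoding.
    const decoded = BitArray.decode(BitArray.encode(bits).finish());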
Long.fromValue(object.app) : Long.UZERO; + return message; + } + +}; + +function createBaseNodeInfo(): NodeInfo { + return { + protocolVersion: undefined, + nodeId: "", + listenAddr: "", + network: "", + version: "", + channels: new Uint8Array(), + moniker: "", + other: undefined + }; +} + +export const NodeInfo = { + encode(message: NodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.protocolVersion !== undefined) { + ProtocolVersion.encode(message.protocolVersion, writer.uint32(10).fork()).ldelim(); + } + + if (message.nodeId !== "") { + writer.uint32(18).string(message.nodeId); + } + + if (message.listenAddr !== "") { + writer.uint32(26).string(message.listenAddr); + } + + if (message.network !== "") { + writer.uint32(34).string(message.network); + } + + if (message.version !== "") { + writer.uint32(42).string(message.version); + } + + if (message.channels.length !== 0) { + writer.uint32(50).bytes(message.channels); + } + + if (message.moniker !== "") { + writer.uint32(58).string(message.moniker); + } + + if (message.other !== undefined) { + NodeInfoOther.encode(message.other, writer.uint32(66).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNodeInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.protocolVersion = ProtocolVersion.decode(reader, reader.uint32()); + break; + + case 2: + message.nodeId = reader.string(); + break; + + case 3: + message.listenAddr = reader.string(); + break; + + case 4: + message.network = reader.string(); + break; + + case 5: + message.version = reader.string(); + break; + + case 6: + message.channels = reader.bytes(); + break; + + case 7: + message.moniker = reader.string(); + break; + + case 8: + message.other = NodeInfoOther.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NodeInfo { + const message = createBaseNodeInfo(); + message.protocolVersion = object.protocolVersion !== undefined && object.protocolVersion !== null ? ProtocolVersion.fromPartial(object.protocolVersion) : undefined; + message.nodeId = object.nodeId ?? ""; + message.listenAddr = object.listenAddr ?? ""; + message.network = object.network ?? ""; + message.version = object.version ?? ""; + message.channels = object.channels ?? new Uint8Array(); + message.moniker = object.moniker ?? ""; + message.other = object.other !== undefined && object.other !== null ? NodeInfoOther.fromPartial(object.other) : undefined; + return message; + } + +}; + +function createBaseNodeInfoOther(): NodeInfoOther { + return { + txIndex: "", + rpcAddress: "" + }; +} + +export const NodeInfoOther = { + encode(message: NodeInfoOther, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txIndex !== "") { + writer.uint32(10).string(message.txIndex); + } + + if (message.rpcAddress !== "") { + writer.uint32(18).string(message.rpcAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NodeInfoOther { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
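Each message also gets a parallel *SDKType interface that keeps the proto's snake_case field names (node_id, listen_addr, ...), whereas decode() and fromPartial() produce objects with the camelCase fields; the snake_case shape is presumably what the LCD/amino code paths of the package consume. A small sketch of the two shapes:

    import { NodeInfo, NodeInfoSDKType } from "./tendermint/p2p/types";

    const info = NodeInfo.fromPartial({ nodeId: "ab12", listenAddr: "tcp://0.0.0.0:26656" });

    // Same data, proto-style field names (required scalars spelled out):
    const sdkShaped: NodeInfoSDKType = {
      node_id: info.nodeId,
      listen_addr: info.listenAddr,
      network: "",
      version: "",
      channels: new Uint8Array(),
      moniker: "",
      protocol_version: undefined,
      other: undefined
    };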
reader.len : reader.pos + length; + const message = createBaseNodeInfoOther(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txIndex = reader.string(); + break; + + case 2: + message.rpcAddress = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): NodeInfoOther { + const message = createBaseNodeInfoOther(); + message.txIndex = object.txIndex ?? ""; + message.rpcAddress = object.rpcAddress ?? ""; + return message; + } + +}; + +function createBasePeerInfo(): PeerInfo { + return { + id: "", + addressInfo: [], + lastConnected: undefined + }; +} + +export const PeerInfo = { + encode(message: PeerInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + + for (const v of message.addressInfo) { + PeerAddressInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + + if (message.lastConnected !== undefined) { + Timestamp.encode(toTimestamp(message.lastConnected), writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeerInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeerInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.id = reader.string(); + break; + + case 2: + message.addressInfo.push(PeerAddressInfo.decode(reader, reader.uint32())); + break; + + case 3: + message.lastConnected = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PeerInfo { + const message = createBasePeerInfo(); + message.id = object.id ?? ""; + message.addressInfo = object.addressInfo?.map(e => PeerAddressInfo.fromPartial(e)) || []; + message.lastConnected = object.lastConnected ?? undefined; + return message; + } + +}; + +function createBasePeerAddressInfo(): PeerAddressInfo { + return { + address: "", + lastDialSuccess: undefined, + lastDialFailure: undefined, + dialFailures: 0 + }; +} + +export const PeerAddressInfo = { + encode(message: PeerAddressInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + + if (message.lastDialSuccess !== undefined) { + Timestamp.encode(toTimestamp(message.lastDialSuccess), writer.uint32(18).fork()).ldelim(); + } + + if (message.lastDialFailure !== undefined) { + Timestamp.encode(toTimestamp(message.lastDialFailure), writer.uint32(26).fork()).ldelim(); + } + + if (message.dialFailures !== 0) { + writer.uint32(32).uint32(message.dialFailures); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeerAddressInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePeerAddressInfo(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + + case 2: + message.lastDialSuccess = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 3: + message.lastDialFailure = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 4: + message.dialFailures = reader.uint32(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PeerAddressInfo { + const message = createBasePeerAddressInfo(); + message.address = object.address ?? ""; + message.lastDialSuccess = object.lastDialSuccess ?? undefined; + message.lastDialFailure = object.lastDialFailure ?? undefined; + message.dialFailures = object.dialFailures ?? 0; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/types/block.ts b/packages/codegen/src/tendermint/types/block.ts new file mode 100644 index 00000000..1e384d23 --- /dev/null +++ b/packages/codegen/src/tendermint/types/block.ts @@ -0,0 +1,91 @@ +import { Header, HeaderSDKType, Data, DataSDKType, Commit, CommitSDKType } from "./types"; +import { EvidenceList, EvidenceListSDKType } from "./evidence"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial } from "../../helpers"; +export interface Block { + header?: Header; + data?: Data; + evidence?: EvidenceList; + lastCommit?: Commit; +} +export interface BlockSDKType { + header?: HeaderSDKType; + data?: DataSDKType; + evidence?: EvidenceListSDKType; + last_commit?: CommitSDKType; +} + +function createBaseBlock(): Block { + return { + header: undefined, + data: undefined, + evidence: undefined, + lastCommit: undefined + }; +} + +export const Block = { + encode(message: Block, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + + if (message.data !== undefined) { + Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + + if (message.evidence !== undefined) { + EvidenceList.encode(message.evidence, writer.uint32(26).fork()).ldelim(); + } + + if (message.lastCommit !== undefined) { + Commit.encode(message.lastCommit, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Block { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + + case 2: + message.data = Data.decode(reader, reader.uint32()); + break; + + case 3: + message.evidence = EvidenceList.decode(reader, reader.uint32()); + break; + + case 4: + message.lastCommit = Commit.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Block { + const message = createBaseBlock(); + message.header = object.header !== undefined && object.header !== null ? Header.fromPartial(object.header) : undefined; + message.data = object.data !== undefined && object.data !== null ? 
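google.protobuf.Timestamp fields surface as plain Date values on these interfaces; toTimestamp()/fromTimestamp() from helpers do the conversion inside encode()/decode(), so callers never handle the Timestamp message directly. For example (values illustrative):

    import { PeerInfo } from "./tendermint/p2p/types";

    const peer = PeerInfo.fromPartial({
      id: "badc0ffee",
      lastConnected: new Date("2023-01-01T00:00:00Z"),
      addressInfo: [{ address: "tcp://10.0.0.1:26656", dialFailures: 2 }]
    });

    const decoded = PeerInfo.decode(PeerInfo.encode(peer).finish());
    // decoded.lastConnected is a Date again, rebuilt via fromTimestamp().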
Data.fromPartial(object.data) : undefined; + message.evidence = object.evidence !== undefined && object.evidence !== null ? EvidenceList.fromPartial(object.evidence) : undefined; + message.lastCommit = object.lastCommit !== undefined && object.lastCommit !== null ? Commit.fromPartial(object.lastCommit) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/types/evidence.ts b/packages/codegen/src/tendermint/types/evidence.ts new file mode 100644 index 00000000..5854403c --- /dev/null +++ b/packages/codegen/src/tendermint/types/evidence.ts @@ -0,0 +1,325 @@ +import { Vote, VoteSDKType, LightBlock, LightBlockSDKType } from "./types"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { Validator, ValidatorSDKType } from "./validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long, toTimestamp, fromTimestamp } from "../../helpers"; +export interface Evidence { + duplicateVoteEvidence?: DuplicateVoteEvidence; + lightClientAttackEvidence?: LightClientAttackEvidence; +} +export interface EvidenceSDKType { + duplicate_vote_evidence?: DuplicateVoteEvidenceSDKType; + light_client_attack_evidence?: LightClientAttackEvidenceSDKType; +} +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ + +export interface DuplicateVoteEvidence { + voteA?: Vote; + voteB?: Vote; + totalVotingPower: Long; + validatorPower: Long; + timestamp?: Date; +} +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ + +export interface DuplicateVoteEvidenceSDKType { + vote_a?: VoteSDKType; + vote_b?: VoteSDKType; + total_voting_power: Long; + validator_power: Long; + timestamp?: Date; +} +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. */ + +export interface LightClientAttackEvidence { + conflictingBlock?: LightBlock; + commonHeight: Long; + byzantineValidators: Validator[]; + totalVotingPower: Long; + timestamp?: Date; +} +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. */ + +export interface LightClientAttackEvidenceSDKType { + conflicting_block?: LightBlockSDKType; + common_height: Long; + byzantine_validators: ValidatorSDKType[]; + total_voting_power: Long; + timestamp?: Date; +} +export interface EvidenceList { + evidence: Evidence[]; +} +export interface EvidenceListSDKType { + evidence: EvidenceSDKType[]; +} + +function createBaseEvidence(): Evidence { + return { + duplicateVoteEvidence: undefined, + lightClientAttackEvidence: undefined + }; +} + +export const Evidence = { + encode(message: Evidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.duplicateVoteEvidence !== undefined) { + DuplicateVoteEvidence.encode(message.duplicateVoteEvidence, writer.uint32(10).fork()).ldelim(); + } + + if (message.lightClientAttackEvidence !== undefined) { + LightClientAttackEvidence.encode(message.lightClientAttackEvidence, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEvidence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.duplicateVoteEvidence = DuplicateVoteEvidence.decode(reader, reader.uint32()); + break; + + case 2: + message.lightClientAttackEvidence = LightClientAttackEvidence.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Evidence { + const message = createBaseEvidence(); + message.duplicateVoteEvidence = object.duplicateVoteEvidence !== undefined && object.duplicateVoteEvidence !== null ? DuplicateVoteEvidence.fromPartial(object.duplicateVoteEvidence) : undefined; + message.lightClientAttackEvidence = object.lightClientAttackEvidence !== undefined && object.lightClientAttackEvidence !== null ? LightClientAttackEvidence.fromPartial(object.lightClientAttackEvidence) : undefined; + return message; + } + +}; + +function createBaseDuplicateVoteEvidence(): DuplicateVoteEvidence { + return { + voteA: undefined, + voteB: undefined, + totalVotingPower: Long.ZERO, + validatorPower: Long.ZERO, + timestamp: undefined + }; +} + +export const DuplicateVoteEvidence = { + encode(message: DuplicateVoteEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.voteA !== undefined) { + Vote.encode(message.voteA, writer.uint32(10).fork()).ldelim(); + } + + if (message.voteB !== undefined) { + Vote.encode(message.voteB, writer.uint32(18).fork()).ldelim(); + } + + if (!message.totalVotingPower.isZero()) { + writer.uint32(24).int64(message.totalVotingPower); + } + + if (!message.validatorPower.isZero()) { + writer.uint32(32).int64(message.validatorPower); + } + + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DuplicateVoteEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuplicateVoteEvidence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.voteA = Vote.decode(reader, reader.uint32()); + break; + + case 2: + message.voteB = Vote.decode(reader, reader.uint32()); + break; + + case 3: + message.totalVotingPower = (reader.int64() as Long); + break; + + case 4: + message.validatorPower = (reader.int64() as Long); + break; + + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): DuplicateVoteEvidence { + const message = createBaseDuplicateVoteEvidence(); + message.voteA = object.voteA !== undefined && object.voteA !== null ? Vote.fromPartial(object.voteA) : undefined; + message.voteB = object.voteB !== undefined && object.voteB !== null ? Vote.fromPartial(object.voteB) : undefined; + message.totalVotingPower = object.totalVotingPower !== undefined && object.totalVotingPower !== null ? Long.fromValue(object.totalVotingPower) : Long.ZERO; + message.validatorPower = object.validatorPower !== undefined && object.validatorPower !== null ? Long.fromValue(object.validatorPower) : Long.ZERO; + message.timestamp = object.timestamp ?? 
undefined; + return message; + } + +}; + +function createBaseLightClientAttackEvidence(): LightClientAttackEvidence { + return { + conflictingBlock: undefined, + commonHeight: Long.ZERO, + byzantineValidators: [], + totalVotingPower: Long.ZERO, + timestamp: undefined + }; +} + +export const LightClientAttackEvidence = { + encode(message: LightClientAttackEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.conflictingBlock !== undefined) { + LightBlock.encode(message.conflictingBlock, writer.uint32(10).fork()).ldelim(); + } + + if (!message.commonHeight.isZero()) { + writer.uint32(16).int64(message.commonHeight); + } + + for (const v of message.byzantineValidators) { + Validator.encode(v!, writer.uint32(26).fork()).ldelim(); + } + + if (!message.totalVotingPower.isZero()) { + writer.uint32(32).int64(message.totalVotingPower); + } + + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightClientAttackEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseLightClientAttackEvidence(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.conflictingBlock = LightBlock.decode(reader, reader.uint32()); + break; + + case 2: + message.commonHeight = (reader.int64() as Long); + break; + + case 3: + message.byzantineValidators.push(Validator.decode(reader, reader.uint32())); + break; + + case 4: + message.totalVotingPower = (reader.int64() as Long); + break; + + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LightClientAttackEvidence { + const message = createBaseLightClientAttackEvidence(); + message.conflictingBlock = object.conflictingBlock !== undefined && object.conflictingBlock !== null ? LightBlock.fromPartial(object.conflictingBlock) : undefined; + message.commonHeight = object.commonHeight !== undefined && object.commonHeight !== null ? Long.fromValue(object.commonHeight) : Long.ZERO; + message.byzantineValidators = object.byzantineValidators?.map(e => Validator.fromPartial(e)) || []; + message.totalVotingPower = object.totalVotingPower !== undefined && object.totalVotingPower !== null ? Long.fromValue(object.totalVotingPower) : Long.ZERO; + message.timestamp = object.timestamp ?? undefined; + return message; + } + +}; + +function createBaseEvidenceList(): EvidenceList { + return { + evidence: [] + }; +} + +export const EvidenceList = { + encode(message: EvidenceList, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.evidence) { + Evidence.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceList { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
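Evidence wraps what is a oneof in the underlying proto, so in practice only one of duplicateVoteEvidence / lightClientAttackEvidence is populated; whichever sub-message is left undefined is simply skipped by encode(). A minimal sketch (fields trimmed for brevity):

    import { Evidence } from "./tendermint/types/evidence";
    import { Long } from "./helpers";

    const ev = Evidence.fromPartial({
      duplicateVoteEvidence: {
        totalVotingPower: Long.fromNumber(1000),
        validatorPower: Long.fromNumber(10),
        timestamp: new Date()
        // voteA / voteB omitted here only to keep the sketch short
      }
    });

    const bytes = Evidence.encode(ev).finish();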
reader.len : reader.pos + length; + const message = createBaseEvidenceList(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.evidence.push(Evidence.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EvidenceList { + const message = createBaseEvidenceList(); + message.evidence = object.evidence?.map(e => Evidence.fromPartial(e)) || []; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/types/params.ts b/packages/codegen/src/tendermint/types/params.ts new file mode 100644 index 00000000..b34abd82 --- /dev/null +++ b/packages/codegen/src/tendermint/types/params.ts @@ -0,0 +1,485 @@ +import { Duration, DurationSDKType } from "../../google/protobuf/duration"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long } from "../../helpers"; +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. + */ + +export interface ConsensusParams { + block?: BlockParams; + evidence?: EvidenceParams; + validator?: ValidatorParams; + version?: VersionParams; +} +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. + */ + +export interface ConsensusParamsSDKType { + block?: BlockParamsSDKType; + evidence?: EvidenceParamsSDKType; + validator?: ValidatorParamsSDKType; + version?: VersionParamsSDKType; +} +/** BlockParams contains limits on the block size. */ + +export interface BlockParams { + /** + * Max block size, in bytes. + * Note: must be greater than 0 + */ + maxBytes: Long; + /** + * Max gas per block. + * Note: must be greater or equal to -1 + */ + + maxGas: Long; + /** + * Minimum time increment between consecutive blocks (in milliseconds) If the + * block header timestamp is ahead of the system clock, decrease this value. + * + * Not exposed to the application. + */ + + timeIotaMs: Long; +} +/** BlockParams contains limits on the block size. */ + +export interface BlockParamsSDKType { + max_bytes: Long; + max_gas: Long; + time_iota_ms: Long; +} +/** EvidenceParams determine how we handle evidence of malfeasance. */ + +export interface EvidenceParams { + /** + * Max age of evidence, in blocks. + * + * The basic formula for calculating this is: MaxAgeDuration / {average block + * time}. + */ + maxAgeNumBlocks: Long; + /** + * Max age of evidence, in time. + * + * It should correspond with an app's "unbonding period" or other similar + * mechanism for handling [Nothing-At-Stake + * attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + */ + + maxAgeDuration?: Duration; + /** + * This sets the maximum size of total evidence in bytes that can be committed in a single block. + * and should fall comfortably under the max block bytes. + * Default is 1048576 or 1MB + */ + + maxBytes: Long; +} +/** EvidenceParams determine how we handle evidence of malfeasance. */ + +export interface EvidenceParamsSDKType { + max_age_num_blocks: Long; + max_age_duration?: DurationSDKType; + max_bytes: Long; +} +/** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ + +export interface ValidatorParams { + pubKeyTypes: string[]; +} +/** + * ValidatorParams restrict the public key types validators can use. 
+ * NOTE: uses ABCI pubkey naming, not Amino names. + */ + +export interface ValidatorParamsSDKType { + pub_key_types: string[]; +} +/** VersionParams contains the ABCI application version. */ + +export interface VersionParams { + appVersion: Long; +} +/** VersionParams contains the ABCI application version. */ + +export interface VersionParamsSDKType { + app_version: Long; +} +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. + */ + +export interface HashedParams { + blockMaxBytes: Long; + blockMaxGas: Long; +} +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. + */ + +export interface HashedParamsSDKType { + block_max_bytes: Long; + block_max_gas: Long; +} + +function createBaseConsensusParams(): ConsensusParams { + return { + block: undefined, + evidence: undefined, + validator: undefined, + version: undefined + }; +} + +export const ConsensusParams = { + encode(message: ConsensusParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== undefined) { + BlockParams.encode(message.block, writer.uint32(10).fork()).ldelim(); + } + + if (message.evidence !== undefined) { + EvidenceParams.encode(message.evidence, writer.uint32(18).fork()).ldelim(); + } + + if (message.validator !== undefined) { + ValidatorParams.encode(message.validator, writer.uint32(26).fork()).ldelim(); + } + + if (message.version !== undefined) { + VersionParams.encode(message.version, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.block = BlockParams.decode(reader, reader.uint32()); + break; + + case 2: + message.evidence = EvidenceParams.decode(reader, reader.uint32()); + break; + + case 3: + message.validator = ValidatorParams.decode(reader, reader.uint32()); + break; + + case 4: + message.version = VersionParams.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ConsensusParams { + const message = createBaseConsensusParams(); + message.block = object.block !== undefined && object.block !== null ? BlockParams.fromPartial(object.block) : undefined; + message.evidence = object.evidence !== undefined && object.evidence !== null ? EvidenceParams.fromPartial(object.evidence) : undefined; + message.validator = object.validator !== undefined && object.validator !== null ? ValidatorParams.fromPartial(object.validator) : undefined; + message.version = object.version !== undefined && object.version !== null ? 
VersionParams.fromPartial(object.version) : undefined; + return message; + } + +}; + +function createBaseBlockParams(): BlockParams { + return { + maxBytes: Long.ZERO, + maxGas: Long.ZERO, + timeIotaMs: Long.ZERO + }; +} + +export const BlockParams = { + encode(message: BlockParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.maxBytes.isZero()) { + writer.uint32(8).int64(message.maxBytes); + } + + if (!message.maxGas.isZero()) { + writer.uint32(16).int64(message.maxGas); + } + + if (!message.timeIotaMs.isZero()) { + writer.uint32(24).int64(message.timeIotaMs); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxBytes = (reader.int64() as Long); + break; + + case 2: + message.maxGas = (reader.int64() as Long); + break; + + case 3: + message.timeIotaMs = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BlockParams { + const message = createBaseBlockParams(); + message.maxBytes = object.maxBytes !== undefined && object.maxBytes !== null ? Long.fromValue(object.maxBytes) : Long.ZERO; + message.maxGas = object.maxGas !== undefined && object.maxGas !== null ? Long.fromValue(object.maxGas) : Long.ZERO; + message.timeIotaMs = object.timeIotaMs !== undefined && object.timeIotaMs !== null ? Long.fromValue(object.timeIotaMs) : Long.ZERO; + return message; + } + +}; + +function createBaseEvidenceParams(): EvidenceParams { + return { + maxAgeNumBlocks: Long.ZERO, + maxAgeDuration: undefined, + maxBytes: Long.ZERO + }; +} + +export const EvidenceParams = { + encode(message: EvidenceParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.maxAgeNumBlocks.isZero()) { + writer.uint32(8).int64(message.maxAgeNumBlocks); + } + + if (message.maxAgeDuration !== undefined) { + Duration.encode(message.maxAgeDuration, writer.uint32(18).fork()).ldelim(); + } + + if (!message.maxBytes.isZero()) { + writer.uint32(24).int64(message.maxBytes); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidenceParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.maxAgeNumBlocks = (reader.int64() as Long); + break; + + case 2: + message.maxAgeDuration = Duration.decode(reader, reader.uint32()); + break; + + case 3: + message.maxBytes = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): EvidenceParams { + const message = createBaseEvidenceParams(); + message.maxAgeNumBlocks = object.maxAgeNumBlocks !== undefined && object.maxAgeNumBlocks !== null ? Long.fromValue(object.maxAgeNumBlocks) : Long.ZERO; + message.maxAgeDuration = object.maxAgeDuration !== undefined && object.maxAgeDuration !== null ? Duration.fromPartial(object.maxAgeDuration) : undefined; + message.maxBytes = object.maxBytes !== undefined && object.maxBytes !== null ? 
Long.fromValue(object.maxBytes) : Long.ZERO; + return message; + } + +}; + +function createBaseValidatorParams(): ValidatorParams { + return { + pubKeyTypes: [] + }; +} + +export const ValidatorParams = { + encode(message: ValidatorParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pubKeyTypes) { + writer.uint32(10).string(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pubKeyTypes.push(reader.string()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorParams { + const message = createBaseValidatorParams(); + message.pubKeyTypes = object.pubKeyTypes?.map(e => e) || []; + return message; + } + +}; + +function createBaseVersionParams(): VersionParams { + return { + appVersion: Long.UZERO + }; +} + +export const VersionParams = { + encode(message: VersionParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.appVersion.isZero()) { + writer.uint32(8).uint64(message.appVersion); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VersionParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVersionParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.appVersion = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): VersionParams { + const message = createBaseVersionParams(); + message.appVersion = object.appVersion !== undefined && object.appVersion !== null ? Long.fromValue(object.appVersion) : Long.UZERO; + return message; + } + +}; + +function createBaseHashedParams(): HashedParams { + return { + blockMaxBytes: Long.ZERO, + blockMaxGas: Long.ZERO + }; +} + +export const HashedParams = { + encode(message: HashedParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.blockMaxBytes.isZero()) { + writer.uint32(8).int64(message.blockMaxBytes); + } + + if (!message.blockMaxGas.isZero()) { + writer.uint32(16).int64(message.blockMaxGas); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HashedParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHashedParams(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockMaxBytes = (reader.int64() as Long); + break; + + case 2: + message.blockMaxGas = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): HashedParams { + const message = createBaseHashedParams(); + message.blockMaxBytes = object.blockMaxBytes !== undefined && object.blockMaxBytes !== null ? 
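fromPartial() recurses into nested messages, so a complete ConsensusParams can be built from a single object literal; the numbers below are illustrative only (they mirror common Tendermint defaults):

    import { ConsensusParams } from "./tendermint/types/params";
    import { Long } from "./helpers";

    const params = ConsensusParams.fromPartial({
      block: {
        maxBytes: Long.fromNumber(22020096),
        maxGas: Long.fromNumber(-1),
        timeIotaMs: Long.fromNumber(1000)
      },
      evidence: {
        maxAgeNumBlocks: Long.fromNumber(100000),
        maxBytes: Long.fromNumber(1048576)
      },
      validator: { pubKeyTypes: ["ed25519"] },
      version: { appVersion: Long.UZERO }
    });

    const wire = ConsensusParams.encode(params).finish();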
Long.fromValue(object.blockMaxBytes) : Long.ZERO; + message.blockMaxGas = object.blockMaxGas !== undefined && object.blockMaxGas !== null ? Long.fromValue(object.blockMaxGas) : Long.ZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/types/types.ts b/packages/codegen/src/tendermint/types/types.ts new file mode 100644 index 00000000..d3b3761f --- /dev/null +++ b/packages/codegen/src/tendermint/types/types.ts @@ -0,0 +1,1357 @@ +import { Proof, ProofSDKType } from "../crypto/proof"; +import { Consensus, ConsensusSDKType } from "../version/types"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { ValidatorSet, ValidatorSetSDKType } from "./validator"; +import * as _m0 from "protobufjs/minimal"; +import { DeepPartial, Long, toTimestamp, fromTimestamp } from "../../helpers"; +/** BlockIdFlag indicates which BlcokID the signature is for */ + +export enum BlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = 0, + BLOCK_ID_FLAG_ABSENT = 1, + BLOCK_ID_FLAG_COMMIT = 2, + BLOCK_ID_FLAG_NIL = 3, + UNRECOGNIZED = -1, +} +export const BlockIDFlagSDKType = BlockIDFlag; +export function blockIDFlagFromJSON(object: any): BlockIDFlag { + switch (object) { + case 0: + case "BLOCK_ID_FLAG_UNKNOWN": + return BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN; + + case 1: + case "BLOCK_ID_FLAG_ABSENT": + return BlockIDFlag.BLOCK_ID_FLAG_ABSENT; + + case 2: + case "BLOCK_ID_FLAG_COMMIT": + return BlockIDFlag.BLOCK_ID_FLAG_COMMIT; + + case 3: + case "BLOCK_ID_FLAG_NIL": + return BlockIDFlag.BLOCK_ID_FLAG_NIL; + + case -1: + case "UNRECOGNIZED": + default: + return BlockIDFlag.UNRECOGNIZED; + } +} +export function blockIDFlagToJSON(object: BlockIDFlag): string { + switch (object) { + case BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN: + return "BLOCK_ID_FLAG_UNKNOWN"; + + case BlockIDFlag.BLOCK_ID_FLAG_ABSENT: + return "BLOCK_ID_FLAG_ABSENT"; + + case BlockIDFlag.BLOCK_ID_FLAG_COMMIT: + return "BLOCK_ID_FLAG_COMMIT"; + + case BlockIDFlag.BLOCK_ID_FLAG_NIL: + return "BLOCK_ID_FLAG_NIL"; + + case BlockIDFlag.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** SignedMsgType is a type of signed message in the consensus. 
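The generated enum converters accept either the numeric value or the string name and fall back to UNRECOGNIZED, so JSON round trips stay lossless:

    import { BlockIDFlag, blockIDFlagFromJSON, blockIDFlagToJSON } from "./tendermint/types/types";

    blockIDFlagFromJSON("BLOCK_ID_FLAG_COMMIT");        // BlockIDFlag.BLOCK_ID_FLAG_COMMIT
    blockIDFlagFromJSON(2);                             // BlockIDFlag.BLOCK_ID_FLAG_COMMIT
    blockIDFlagFromJSON("not-a-flag");                  // BlockIDFlag.UNRECOGNIZED
    blockIDFlagToJSON(BlockIDFlag.BLOCK_ID_FLAG_NIL);   // "BLOCK_ID_FLAG_NIL"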
*/ + +export enum SignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = 0, + + /** SIGNED_MSG_TYPE_PREVOTE - Votes */ + SIGNED_MSG_TYPE_PREVOTE = 1, + SIGNED_MSG_TYPE_PRECOMMIT = 2, + + /** SIGNED_MSG_TYPE_PROPOSAL - Proposals */ + SIGNED_MSG_TYPE_PROPOSAL = 32, + UNRECOGNIZED = -1, +} +export const SignedMsgTypeSDKType = SignedMsgType; +export function signedMsgTypeFromJSON(object: any): SignedMsgType { + switch (object) { + case 0: + case "SIGNED_MSG_TYPE_UNKNOWN": + return SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN; + + case 1: + case "SIGNED_MSG_TYPE_PREVOTE": + return SignedMsgType.SIGNED_MSG_TYPE_PREVOTE; + + case 2: + case "SIGNED_MSG_TYPE_PRECOMMIT": + return SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT; + + case 32: + case "SIGNED_MSG_TYPE_PROPOSAL": + return SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL; + + case -1: + case "UNRECOGNIZED": + default: + return SignedMsgType.UNRECOGNIZED; + } +} +export function signedMsgTypeToJSON(object: SignedMsgType): string { + switch (object) { + case SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN: + return "SIGNED_MSG_TYPE_UNKNOWN"; + + case SignedMsgType.SIGNED_MSG_TYPE_PREVOTE: + return "SIGNED_MSG_TYPE_PREVOTE"; + + case SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT: + return "SIGNED_MSG_TYPE_PRECOMMIT"; + + case SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL: + return "SIGNED_MSG_TYPE_PROPOSAL"; + + case SignedMsgType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +/** PartsetHeader */ + +export interface PartSetHeader { + total: number; + hash: Uint8Array; +} +/** PartsetHeader */ + +export interface PartSetHeaderSDKType { + total: number; + hash: Uint8Array; +} +export interface Part { + index: number; + bytes: Uint8Array; + proof?: Proof; +} +export interface PartSDKType { + index: number; + bytes: Uint8Array; + proof?: ProofSDKType; +} +/** BlockID */ + +export interface BlockID { + hash: Uint8Array; + partSetHeader?: PartSetHeader; +} +/** BlockID */ + +export interface BlockIDSDKType { + hash: Uint8Array; + part_set_header?: PartSetHeaderSDKType; +} +/** Header defines the structure of a Tendermint block header. */ + +export interface Header { + /** basic block info */ + version?: Consensus; + chainId: string; + height: Long; + time?: Date; + /** prev block info */ + + lastBlockId?: BlockID; + /** hashes of block data */ + + lastCommitHash: Uint8Array; + dataHash: Uint8Array; + /** hashes from the app output from the prev block */ + + validatorsHash: Uint8Array; + /** validators for the next block */ + + nextValidatorsHash: Uint8Array; + /** consensus params for current block */ + + consensusHash: Uint8Array; + /** state after txs from the previous block */ + + appHash: Uint8Array; + lastResultsHash: Uint8Array; + /** consensus info */ + + evidenceHash: Uint8Array; + /** original proposer of the block */ + + proposerAddress: Uint8Array; +} +/** Header defines the structure of a Tendermint block header. */ + +export interface HeaderSDKType { + version?: ConsensusSDKType; + chain_id: string; + height: Long; + time?: Date; + last_block_id?: BlockIDSDKType; + last_commit_hash: Uint8Array; + data_hash: Uint8Array; + validators_hash: Uint8Array; + next_validators_hash: Uint8Array; + consensus_hash: Uint8Array; + app_hash: Uint8Array; + last_results_hash: Uint8Array; + evidence_hash: Uint8Array; + proposer_address: Uint8Array; +} +/** Data contains the set of transactions included in the block */ + +export interface Data { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. 
+ * This means that block.AppHash does not include these txs. + */ + txs: Uint8Array[]; +} +/** Data contains the set of transactions included in the block */ + +export interface DataSDKType { + txs: Uint8Array[]; +} +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + +export interface Vote { + type: SignedMsgType; + height: Long; + round: number; + /** zero if vote is nil. */ + + blockId?: BlockID; + timestamp?: Date; + validatorAddress: Uint8Array; + validatorIndex: number; + signature: Uint8Array; +} +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + +export interface VoteSDKType { + type: SignedMsgType; + height: Long; + round: number; + block_id?: BlockIDSDKType; + timestamp?: Date; + validator_address: Uint8Array; + validator_index: number; + signature: Uint8Array; +} +/** Commit contains the evidence that a block was committed by a set of validators. */ + +export interface Commit { + height: Long; + round: number; + blockId?: BlockID; + signatures: CommitSig[]; +} +/** Commit contains the evidence that a block was committed by a set of validators. */ + +export interface CommitSDKType { + height: Long; + round: number; + block_id?: BlockIDSDKType; + signatures: CommitSigSDKType[]; +} +/** CommitSig is a part of the Vote included in a Commit. */ + +export interface CommitSig { + blockIdFlag: BlockIDFlag; + validatorAddress: Uint8Array; + timestamp?: Date; + signature: Uint8Array; +} +/** CommitSig is a part of the Vote included in a Commit. */ + +export interface CommitSigSDKType { + block_id_flag: BlockIDFlag; + validator_address: Uint8Array; + timestamp?: Date; + signature: Uint8Array; +} +export interface Proposal { + type: SignedMsgType; + height: Long; + round: number; + polRound: number; + blockId?: BlockID; + timestamp?: Date; + signature: Uint8Array; +} +export interface ProposalSDKType { + type: SignedMsgType; + height: Long; + round: number; + pol_round: number; + block_id?: BlockIDSDKType; + timestamp?: Date; + signature: Uint8Array; +} +export interface SignedHeader { + header?: Header; + commit?: Commit; +} +export interface SignedHeaderSDKType { + header?: HeaderSDKType; + commit?: CommitSDKType; +} +export interface LightBlock { + signedHeader?: SignedHeader; + validatorSet?: ValidatorSet; +} +export interface LightBlockSDKType { + signed_header?: SignedHeaderSDKType; + validator_set?: ValidatorSetSDKType; +} +export interface BlockMeta { + blockId?: BlockID; + blockSize: Long; + header?: Header; + numTxs: Long; +} +export interface BlockMetaSDKType { + block_id?: BlockIDSDKType; + block_size: Long; + header?: HeaderSDKType; + num_txs: Long; +} +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. */ + +export interface TxProof { + rootHash: Uint8Array; + data: Uint8Array; + proof?: Proof; +} +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. 
*/ + +export interface TxProofSDKType { + root_hash: Uint8Array; + data: Uint8Array; + proof?: ProofSDKType; +} + +function createBasePartSetHeader(): PartSetHeader { + return { + total: 0, + hash: new Uint8Array() + }; +} + +export const PartSetHeader = { + encode(message: PartSetHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.total !== 0) { + writer.uint32(8).uint32(message.total); + } + + if (message.hash.length !== 0) { + writer.uint32(18).bytes(message.hash); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PartSetHeader { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePartSetHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.total = reader.uint32(); + break; + + case 2: + message.hash = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): PartSetHeader { + const message = createBasePartSetHeader(); + message.total = object.total ?? 0; + message.hash = object.hash ?? new Uint8Array(); + return message; + } + +}; + +function createBasePart(): Part { + return { + index: 0, + bytes: new Uint8Array(), + proof: undefined + }; +} + +export const Part = { + encode(message: Part, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + + if (message.bytes.length !== 0) { + writer.uint32(18).bytes(message.bytes); + } + + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Part { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePart(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.index = reader.uint32(); + break; + + case 2: + message.bytes = reader.bytes(); + break; + + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Part { + const message = createBasePart(); + message.index = object.index ?? 0; + message.bytes = object.bytes ?? new Uint8Array(); + message.proof = object.proof !== undefined && object.proof !== null ? Proof.fromPartial(object.proof) : undefined; + return message; + } + +}; + +function createBaseBlockID(): BlockID { + return { + hash: new Uint8Array(), + partSetHeader: undefined + }; +} + +export const BlockID = { + encode(message: BlockID, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + + if (message.partSetHeader !== undefined) { + PartSetHeader.encode(message.partSetHeader, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockID { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockID(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + + case 2: + message.partSetHeader = PartSetHeader.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BlockID { + const message = createBaseBlockID(); + message.hash = object.hash ?? new Uint8Array(); + message.partSetHeader = object.partSetHeader !== undefined && object.partSetHeader !== null ? PartSetHeader.fromPartial(object.partSetHeader) : undefined; + return message; + } + +}; + +function createBaseHeader(): Header { + return { + version: undefined, + chainId: "", + height: Long.ZERO, + time: undefined, + lastBlockId: undefined, + lastCommitHash: new Uint8Array(), + dataHash: new Uint8Array(), + validatorsHash: new Uint8Array(), + nextValidatorsHash: new Uint8Array(), + consensusHash: new Uint8Array(), + appHash: new Uint8Array(), + lastResultsHash: new Uint8Array(), + evidenceHash: new Uint8Array(), + proposerAddress: new Uint8Array() + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== undefined) { + Consensus.encode(message.version, writer.uint32(10).fork()).ldelim(); + } + + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + + if (!message.height.isZero()) { + writer.uint32(24).int64(message.height); + } + + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(34).fork()).ldelim(); + } + + if (message.lastBlockId !== undefined) { + BlockID.encode(message.lastBlockId, writer.uint32(42).fork()).ldelim(); + } + + if (message.lastCommitHash.length !== 0) { + writer.uint32(50).bytes(message.lastCommitHash); + } + + if (message.dataHash.length !== 0) { + writer.uint32(58).bytes(message.dataHash); + } + + if (message.validatorsHash.length !== 0) { + writer.uint32(66).bytes(message.validatorsHash); + } + + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(74).bytes(message.nextValidatorsHash); + } + + if (message.consensusHash.length !== 0) { + writer.uint32(82).bytes(message.consensusHash); + } + + if (message.appHash.length !== 0) { + writer.uint32(90).bytes(message.appHash); + } + + if (message.lastResultsHash.length !== 0) { + writer.uint32(98).bytes(message.lastResultsHash); + } + + if (message.evidenceHash.length !== 0) { + writer.uint32(106).bytes(message.evidenceHash); + } + + if (message.proposerAddress.length !== 0) { + writer.uint32(114).bytes(message.proposerAddress); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.version = Consensus.decode(reader, reader.uint32()); + break; + + case 2: + message.chainId = reader.string(); + break; + + case 3: + message.height = (reader.int64() as Long); + break; + + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 5: + message.lastBlockId = BlockID.decode(reader, reader.uint32()); + break; + + case 6: + message.lastCommitHash = reader.bytes(); + break; + + case 7: + message.dataHash = reader.bytes(); + break; + + case 8: + message.validatorsHash = reader.bytes(); + break; + + case 9: + message.nextValidatorsHash = reader.bytes(); + break; + + case 10: + message.consensusHash = reader.bytes(); + break; + + case 11: + message.appHash = reader.bytes(); + break; + + case 12: + message.lastResultsHash = reader.bytes(); + break; + + case 13: + message.evidenceHash = reader.bytes(); + break; + + case 14: + message.proposerAddress = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial
): Header { + const message = createBaseHeader(); + message.version = object.version !== undefined && object.version !== null ? Consensus.fromPartial(object.version) : undefined; + message.chainId = object.chainId ?? ""; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.time = object.time ?? undefined; + message.lastBlockId = object.lastBlockId !== undefined && object.lastBlockId !== null ? BlockID.fromPartial(object.lastBlockId) : undefined; + message.lastCommitHash = object.lastCommitHash ?? new Uint8Array(); + message.dataHash = object.dataHash ?? new Uint8Array(); + message.validatorsHash = object.validatorsHash ?? new Uint8Array(); + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.consensusHash = object.consensusHash ?? new Uint8Array(); + message.appHash = object.appHash ?? new Uint8Array(); + message.lastResultsHash = object.lastResultsHash ?? new Uint8Array(); + message.evidenceHash = object.evidenceHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? new Uint8Array(); + return message; + } + +}; + +function createBaseData(): Data { + return { + txs: [] + }; +} + +export const Data = { + encode(message: Data, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + writer.uint32(10).bytes(v!); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Data { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseData(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.txs.push(reader.bytes()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Data { + const message = createBaseData(); + message.txs = object.txs?.map(e => e) || []; + return message; + } + +}; + +function createBaseVote(): Vote { + return { + type: 0, + height: Long.ZERO, + round: 0, + blockId: undefined, + timestamp: undefined, + validatorAddress: new Uint8Array(), + validatorIndex: 0, + signature: new Uint8Array() + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + + if (!message.height.isZero()) { + writer.uint32(16).int64(message.height); + } + + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(34).fork()).ldelim(); + } + + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + + if (message.validatorAddress.length !== 0) { + writer.uint32(50).bytes(message.validatorAddress); + } + + if (message.validatorIndex !== 0) { + writer.uint32(56).int32(message.validatorIndex); + } + + if (message.signature.length !== 0) { + writer.uint32(66).bytes(message.signature); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVote(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = (reader.int32() as any); + break; + + case 2: + message.height = (reader.int64() as Long); + break; + + case 3: + message.round = reader.int32(); + break; + + case 4: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 6: + message.validatorAddress = reader.bytes(); + break; + + case 7: + message.validatorIndex = reader.int32(); + break; + + case 8: + message.signature = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Vote { + const message = createBaseVote(); + message.type = object.type ?? 0; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.round = object.round ?? 0; + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.timestamp = object.timestamp ?? undefined; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.validatorIndex = object.validatorIndex ?? 0; + message.signature = object.signature ?? new Uint8Array(); + return message; + } + +}; + +function createBaseCommit(): Commit { + return { + height: Long.ZERO, + round: 0, + blockId: undefined, + signatures: [] + }; +} + +export const Commit = { + encode(message: Commit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.height.isZero()) { + writer.uint32(8).int64(message.height); + } + + if (message.round !== 0) { + writer.uint32(16).int32(message.round); + } + + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(26).fork()).ldelim(); + } + + for (const v of message.signatures) { + CommitSig.encode(v!, writer.uint32(34).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Commit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommit(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.height = (reader.int64() as Long); + break; + + case 2: + message.round = reader.int32(); + break; + + case 3: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 4: + message.signatures.push(CommitSig.decode(reader, reader.uint32())); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Commit { + const message = createBaseCommit(); + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.round = object.round ?? 0; + message.blockId = object.blockId !== undefined && object.blockId !== null ? 
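Enum-typed fields are plain numbers on the runtime objects, so a vote is assembled directly from the SignedMsgType values, with the ToJSON helper available for display. A sketch with illustrative values:

    import { Vote, SignedMsgType, signedMsgTypeToJSON } from "./tendermint/types/types";
    import { Long } from "./helpers";

    const vote = Vote.fromPartial({
      type: SignedMsgType.SIGNED_MSG_TYPE_PREVOTE,
      height: Long.fromNumber(42),
      round: 0,
      timestamp: new Date(),
      validatorAddress: new Uint8Array(20),
      validatorIndex: 3,
      signature: new Uint8Array(64)
    });

    signedMsgTypeToJSON(vote.type);          // "SIGNED_MSG_TYPE_PREVOTE"
    const wire = Vote.encode(vote).finish();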
BlockID.fromPartial(object.blockId) : undefined; + message.signatures = object.signatures?.map(e => CommitSig.fromPartial(e)) || []; + return message; + } + +}; + +function createBaseCommitSig(): CommitSig { + return { + blockIdFlag: 0, + validatorAddress: new Uint8Array(), + timestamp: undefined, + signature: new Uint8Array() + }; +} + +export const CommitSig = { + encode(message: CommitSig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockIdFlag !== 0) { + writer.uint32(8).int32(message.blockIdFlag); + } + + if (message.validatorAddress.length !== 0) { + writer.uint32(18).bytes(message.validatorAddress); + } + + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(26).fork()).ldelim(); + } + + if (message.signature.length !== 0) { + writer.uint32(34).bytes(message.signature); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitSig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitSig(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockIdFlag = (reader.int32() as any); + break; + + case 2: + message.validatorAddress = reader.bytes(); + break; + + case 3: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 4: + message.signature = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): CommitSig { + const message = createBaseCommitSig(); + message.blockIdFlag = object.blockIdFlag ?? 0; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + } + +}; + +function createBaseProposal(): Proposal { + return { + type: 0, + height: Long.ZERO, + round: 0, + polRound: 0, + blockId: undefined, + timestamp: undefined, + signature: new Uint8Array() + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + + if (!message.height.isZero()) { + writer.uint32(16).int64(message.height); + } + + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + + if (message.polRound !== 0) { + writer.uint32(32).int32(message.polRound); + } + + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(42).fork()).ldelim(); + } + + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(50).fork()).ldelim(); + } + + if (message.signature.length !== 0) { + writer.uint32(58).bytes(message.signature); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProposal(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.type = (reader.int32() as any); + break; + + case 2: + message.height = (reader.int64() as Long); + break; + + case 3: + message.round = reader.int32(); + break; + + case 4: + message.polRound = reader.int32(); + break; + + case 5: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 6: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + + case 7: + message.signature = reader.bytes(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Proposal { + const message = createBaseProposal(); + message.type = object.type ?? 0; + message.height = object.height !== undefined && object.height !== null ? Long.fromValue(object.height) : Long.ZERO; + message.round = object.round ?? 0; + message.polRound = object.polRound ?? 0; + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + } + +}; + +function createBaseSignedHeader(): SignedHeader { + return { + header: undefined, + commit: undefined + }; +} + +export const SignedHeader = { + encode(message: SignedHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + + if (message.commit !== undefined) { + Commit.encode(message.commit, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignedHeader { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignedHeader(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + + case 2: + message.commit = Commit.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SignedHeader { + const message = createBaseSignedHeader(); + message.header = object.header !== undefined && object.header !== null ? Header.fromPartial(object.header) : undefined; + message.commit = object.commit !== undefined && object.commit !== null ? Commit.fromPartial(object.commit) : undefined; + return message; + } + +}; + +function createBaseLightBlock(): LightBlock { + return { + signedHeader: undefined, + validatorSet: undefined + }; +} + +export const LightBlock = { + encode(message: LightBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signedHeader !== undefined) { + SignedHeader.encode(message.signedHeader, writer.uint32(10).fork()).ldelim(); + } + + if (message.validatorSet !== undefined) { + ValidatorSet.encode(message.validatorSet, writer.uint32(18).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLightBlock(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.signedHeader = SignedHeader.decode(reader, reader.uint32()); + break; + + case 2: + message.validatorSet = ValidatorSet.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): LightBlock { + const message = createBaseLightBlock(); + message.signedHeader = object.signedHeader !== undefined && object.signedHeader !== null ? SignedHeader.fromPartial(object.signedHeader) : undefined; + message.validatorSet = object.validatorSet !== undefined && object.validatorSet !== null ? ValidatorSet.fromPartial(object.validatorSet) : undefined; + return message; + } + +}; + +function createBaseBlockMeta(): BlockMeta { + return { + blockId: undefined, + blockSize: Long.ZERO, + header: undefined, + numTxs: Long.ZERO + }; +} + +export const BlockMeta = { + encode(message: BlockMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + + if (!message.blockSize.isZero()) { + writer.uint32(16).int64(message.blockSize); + } + + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(26).fork()).ldelim(); + } + + if (!message.numTxs.isZero()) { + writer.uint32(32).int64(message.numTxs); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockMeta { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockMeta(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + + case 2: + message.blockSize = (reader.int64() as Long); + break; + + case 3: + message.header = Header.decode(reader, reader.uint32()); + break; + + case 4: + message.numTxs = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): BlockMeta { + const message = createBaseBlockMeta(); + message.blockId = object.blockId !== undefined && object.blockId !== null ? BlockID.fromPartial(object.blockId) : undefined; + message.blockSize = object.blockSize !== undefined && object.blockSize !== null ? Long.fromValue(object.blockSize) : Long.ZERO; + message.header = object.header !== undefined && object.header !== null ? Header.fromPartial(object.header) : undefined; + message.numTxs = object.numTxs !== undefined && object.numTxs !== null ? Long.fromValue(object.numTxs) : Long.ZERO; + return message; + } + +}; + +function createBaseTxProof(): TxProof { + return { + rootHash: new Uint8Array(), + data: new Uint8Array(), + proof: undefined + }; +} + +export const TxProof = { + encode(message: TxProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rootHash.length !== 0) { + writer.uint32(10).bytes(message.rootHash); + } + + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxProof { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxProof(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.rootHash = reader.bytes(); + break; + + case 2: + message.data = reader.bytes(); + break; + + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): TxProof { + const message = createBaseTxProof(); + message.rootHash = object.rootHash ?? new Uint8Array(); + message.data = object.data ?? new Uint8Array(); + message.proof = object.proof !== undefined && object.proof !== null ? Proof.fromPartial(object.proof) : undefined; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/types/validator.ts b/packages/codegen/src/tendermint/types/validator.ts new file mode 100644 index 00000000..c5faab39 --- /dev/null +++ b/packages/codegen/src/tendermint/types/validator.ts @@ -0,0 +1,228 @@ +import { PublicKey, PublicKeySDKType } from "../crypto/keys"; +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +export interface ValidatorSet { + validators: Validator[]; + proposer?: Validator; + totalVotingPower: Long; +} +export interface ValidatorSetSDKType { + validators: ValidatorSDKType[]; + proposer?: ValidatorSDKType; + total_voting_power: Long; +} +export interface Validator { + address: Uint8Array; + pubKey?: PublicKey; + votingPower: Long; + proposerPriority: Long; +} +export interface ValidatorSDKType { + address: Uint8Array; + pub_key?: PublicKeySDKType; + voting_power: Long; + proposer_priority: Long; +} +export interface SimpleValidator { + pubKey?: PublicKey; + votingPower: Long; +} +export interface SimpleValidatorSDKType { + pub_key?: PublicKeySDKType; + voting_power: Long; +} + +function createBaseValidatorSet(): ValidatorSet { + return { + validators: [], + proposer: undefined, + totalVotingPower: Long.ZERO + }; +} + +export const ValidatorSet = { + encode(message: ValidatorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + + if (message.proposer !== undefined) { + Validator.encode(message.proposer, writer.uint32(18).fork()).ldelim(); + } + + if (!message.totalVotingPower.isZero()) { + writer.uint32(24).int64(message.totalVotingPower); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSet(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + + case 2: + message.proposer = Validator.decode(reader, reader.uint32()); + break; + + case 3: + message.totalVotingPower = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): ValidatorSet { + const message = createBaseValidatorSet(); + message.validators = object.validators?.map(e => Validator.fromPartial(e)) || []; + message.proposer = object.proposer !== undefined && object.proposer !== null ? 
Validator.fromPartial(object.proposer) : undefined; + message.totalVotingPower = object.totalVotingPower !== undefined && object.totalVotingPower !== null ? Long.fromValue(object.totalVotingPower) : Long.ZERO; + return message; + } + +}; + +function createBaseValidator(): Validator { + return { + address: new Uint8Array(), + pubKey: undefined, + votingPower: Long.ZERO, + proposerPriority: Long.ZERO + }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address.length !== 0) { + writer.uint32(10).bytes(message.address); + } + + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + + if (!message.votingPower.isZero()) { + writer.uint32(24).int64(message.votingPower); + } + + if (!message.proposerPriority.isZero()) { + writer.uint32(32).int64(message.proposerPriority); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.address = reader.bytes(); + break; + + case 2: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + + case 3: + message.votingPower = (reader.int64() as Long); + break; + + case 4: + message.proposerPriority = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Validator { + const message = createBaseValidator(); + message.address = object.address ?? new Uint8Array(); + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? PublicKey.fromPartial(object.pubKey) : undefined; + message.votingPower = object.votingPower !== undefined && object.votingPower !== null ? Long.fromValue(object.votingPower) : Long.ZERO; + message.proposerPriority = object.proposerPriority !== undefined && object.proposerPriority !== null ? Long.fromValue(object.proposerPriority) : Long.ZERO; + return message; + } + +}; + +function createBaseSimpleValidator(): SimpleValidator { + return { + pubKey: undefined, + votingPower: Long.ZERO + }; +} + +export const SimpleValidator = { + encode(message: SimpleValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(10).fork()).ldelim(); + } + + if (!message.votingPower.isZero()) { + writer.uint32(16).int64(message.votingPower); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimpleValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimpleValidator(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + + case 2: + message.votingPower = (reader.int64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): SimpleValidator { + const message = createBaseSimpleValidator(); + message.pubKey = object.pubKey !== undefined && object.pubKey !== null ? 
PublicKey.fromPartial(object.pubKey) : undefined; + message.votingPower = object.votingPower !== undefined && object.votingPower !== null ? Long.fromValue(object.votingPower) : Long.ZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/src/tendermint/version/types.ts b/packages/codegen/src/tendermint/version/types.ts new file mode 100644 index 00000000..7afc9846 --- /dev/null +++ b/packages/codegen/src/tendermint/version/types.ts @@ -0,0 +1,152 @@ +import { Long, DeepPartial } from "../../helpers"; +import * as _m0 from "protobufjs/minimal"; +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ + +export interface App { + protocol: Long; + software: string; +} +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ + +export interface AppSDKType { + protocol: Long; + software: string; +} +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ + +export interface Consensus { + block: Long; + app: Long; +} +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ + +export interface ConsensusSDKType { + block: Long; + app: Long; +} + +function createBaseApp(): App { + return { + protocol: Long.UZERO, + software: "" + }; +} + +export const App = { + encode(message: App, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.protocol.isZero()) { + writer.uint32(8).uint64(message.protocol); + } + + if (message.software !== "") { + writer.uint32(18).string(message.software); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): App { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseApp(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.protocol = (reader.uint64() as Long); + break; + + case 2: + message.software = reader.string(); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): App { + const message = createBaseApp(); + message.protocol = object.protocol !== undefined && object.protocol !== null ? Long.fromValue(object.protocol) : Long.UZERO; + message.software = object.software ?? ""; + return message; + } + +}; + +function createBaseConsensus(): Consensus { + return { + block: Long.UZERO, + app: Long.UZERO + }; +} + +export const Consensus = { + encode(message: Consensus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (!message.block.isZero()) { + writer.uint32(8).uint64(message.block); + } + + if (!message.app.isZero()) { + writer.uint32(16).uint64(message.app); + } + + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Consensus { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseConsensus(); + + while (reader.pos < end) { + const tag = reader.uint32(); + + switch (tag >>> 3) { + case 1: + message.block = (reader.uint64() as Long); + break; + + case 2: + message.app = (reader.uint64() as Long); + break; + + default: + reader.skipType(tag & 7); + break; + } + } + + return message; + }, + + fromPartial(object: DeepPartial): Consensus { + const message = createBaseConsensus(); + message.block = object.block !== undefined && object.block !== null ? Long.fromValue(object.block) : Long.UZERO; + message.app = object.app !== undefined && object.app !== null ? Long.fromValue(object.app) : Long.UZERO; + return message; + } + +}; \ No newline at end of file diff --git a/packages/codegen/tsconfig.json b/packages/codegen/tsconfig.json new file mode 100644 index 00000000..e9a47446 --- /dev/null +++ b/packages/codegen/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2015", + "moduleResolution": "node", + "allowJs": true, + "esModuleInterop": true, + "declaration": true, + "outDir": "dist", + // "declarationDir": "./types", + "emitDeclarationOnly": true, + "isolatedModules": true + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules" + ] +} \ No newline at end of file diff --git a/packages/dashboard/package.json b/packages/dashboard/package.json index d3122e98..f0c1f6a2 100644 --- a/packages/dashboard/package.json +++ b/packages/dashboard/package.json @@ -3,7 +3,7 @@ "version": "0.0.0", "private": true, "scripts": { - "dev": "vite", + "serve": "vite", "build": "run-p type-check build-only", "preview": "vite preview", "build-only": "vite build", @@ -16,13 +16,17 @@ "@floating-ui/dom": "^1.2.0", "@iconify/vue": "^4.1.0", "@intlify/unplugin-vue-i18n": "^0.8.2", - "@osmonauts/lcd": "^0.10.0", + "@osmonauts/lcd": "^0.8.0", "@ping-pub/chain-registry-client": "^0.0.25", + "@ping-pub/codegen": "*", "@vitejs/plugin-vue-jsx": "^3.0.0", "@vueuse/core": "^9.12.0", "@vueuse/math": "^9.12.0", + "apexcharts": "^3.37.1", "axios": "^1.3.2", "cross-fetch": "^3.1.5", + "md-editor-v3": "^2.8.1", + "numeral": "^2.0.6", "osmojs": "^14.0.0-rc.0", "pinia": "^2.0.28", "vite-plugin-vue-layouts": "^0.7.0", @@ -30,13 +34,17 @@ "vue": "^3.2.45", "vue-i18n": "^9.2.2", "vue-router": "^4.1.6", + "vue3-apexcharts": "^1.4.1", "vue3-perfect-scrollbar": "^1.6.1", "vuetify": "3.0.6", "webfontloader": "^1.6.28" }, "devDependencies": { + "@osmonauts/telescope": "^0.88.2", "@rushstack/eslint-patch": "^1.1.4", + "@types/marked": "^4.0.8", "@types/node": "^18.11.12", + "@types/numeral": "^2.0.2", "@vitejs/plugin-vue": "^4.0.0", "@vue/eslint-config-prettier": "^7.0.0", "@vue/eslint-config-typescript": "^11.0.0", diff --git a/packages/dashboard/src/components/charts/PriceMarketChart.vue b/packages/dashboard/src/components/charts/PriceMarketChart.vue new file mode 100644 index 00000000..1a026e83 --- /dev/null +++ b/packages/dashboard/src/components/charts/PriceMarketChart.vue @@ -0,0 +1,30 @@ + + + \ No newline at end of file diff --git a/packages/dashboard/src/components/charts/apexCharConfig.ts b/packages/dashboard/src/components/charts/apexCharConfig.ts new file mode 100644 index 00000000..920f2759 --- /dev/null +++ b/packages/dashboard/src/components/charts/apexCharConfig.ts @@ -0,0 +1,778 @@ +import type { ThemeInstance } from 'vuetify' +import { hexToRgb } from '@/plugins/vuetify/@layouts/utils' +import numeral from 'numeral' + +// 👉 Colors variables +const colorVariables = (themeColors: 
ThemeInstance['themes']['value']['colors']) => { + const themeSecondaryTextColor = `rgba(${hexToRgb(themeColors.colors['on-surface'])},${themeColors.variables['medium-emphasis-opacity']})` + const themeDisabledTextColor = `rgba(${hexToRgb(themeColors.colors['on-surface'])},${themeColors.variables['disabled-opacity']})` + const themeBorderColor = `rgba(${hexToRgb(String(themeColors.variables['border-color']))},${themeColors.variables['border-opacity']})` + const themePrimaryTextColor = `rgba(${hexToRgb(themeColors.colors['on-surface'])},${themeColors.variables['high-emphasis-opacity']})` + + return { themeSecondaryTextColor, themeDisabledTextColor, themeBorderColor, themePrimaryTextColor } +} +/// Price Chart config +export const getMarketPriceChartConfig = (themeColors: ThemeInstance['themes']['value']['colors'], categories: string[]) => { + + const { themeSecondaryTextColor, themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + height: 260, + parentHeightOffset: 0, + toolbar: { show: false }, + }, + tooltip: { + theme: 'dark', + shared: false + }, + dataLabels: { enabled: false }, + stroke: { + // show: false, + curve: 'smooth', + width: 1.5, + }, + legend: { + position: 'top', + horizontalAlign: 'left', + + labels: { colors: themeSecondaryTextColor }, + markers: { + offsetY: 1, + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + + colors: [themeColors.colors.primary], + fill: { + opacity: 0.5, + type: 'gradient', + }, + grid: { + show: true, + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + formatter: function (value: string) { + const pattern = (Number(value) > 0.01 ? '0.0[0]a': '0.00[000]') + return numeral(value).format(pattern); + } + }, + }, + xaxis: { + type: 'datetime', + axisBorder: { show: false }, + + axisTicks: { color: themeBorderColor }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + categories, + }, + } +} + +/// default config + +export const getScatterChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const scatterColors = { + series1: '#ff9f43', + series2: '#7367f0', + series3: '#28c76f', + } + + const { themeSecondaryTextColor, themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + zoom: { + type: 'xy', + enabled: true, + }, + }, + legend: { + position: 'top', + horizontalAlign: 'left', + markers: { offsetX: -3 }, + + labels: { colors: themeSecondaryTextColor }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + colors: [scatterColors.series1, scatterColors.series2, scatterColors.series3], + grid: { + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + tickAmount: 10, + axisBorder: { show: false }, + + axisTicks: { color: themeBorderColor }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + formatter: (val: string) => parseFloat(val).toFixed(1), + }, + }, + } +} +export const getLineChartSimpleConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const { themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + zoom: { 
enabled: false }, + toolbar: { show: false }, + }, + colors: ['#ff9f43'], + stroke: { curve: 'straight' }, + dataLabels: { enabled: false }, + markers: { + strokeWidth: 7, + strokeOpacity: 1, + colors: ['#ff9f43'], + strokeColors: ['#fff'], + }, + grid: { + padding: { top: -10 }, + + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + tooltip: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + custom(data: any) { + return `
+ ${data.series[data.seriesIndex][data.dataPointIndex]}% +
` + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + axisBorder: { show: false }, + + axisTicks: { color: themeBorderColor }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + categories: [ + '7/12', + '8/12', + '9/12', + '10/12', + '11/12', + '12/12', + '13/12', + '14/12', + '15/12', + '16/12', + '17/12', + '18/12', + '19/12', + '20/12', + '21/12', + ], + }, + } +} + +export const getBarChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const { themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + }, + colors: ['#00cfe8'], + dataLabels: { enabled: false }, + plotOptions: { + bar: { + borderRadius: 8, + barHeight: '30%', + horizontal: true, + startingShape: 'rounded', + }, + }, + grid: { + borderColor: themeBorderColor, + xaxis: { + lines: { show: false }, + }, + padding: { + top: -10, + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + axisBorder: { show: false }, + axisTicks: { color: themeBorderColor }, + categories: ['MON, 11', 'THU, 14', 'FRI, 15', 'MON, 18', 'WED, 20', 'FRI, 21', 'MON, 23'], + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + } +} + +export const getCandlestickChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const candlestickColors = { + series1: '#28c76f', + series2: '#ea5455', + } + + const { themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + }, + plotOptions: { + bar: { columnWidth: '40%' }, + candlestick: { + colors: { + upward: candlestickColors.series1, + downward: candlestickColors.series2, + }, + }, + }, + grid: { + padding: { top: -10 }, + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + yaxis: { + tooltip: { enabled: true }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + type: 'datetime', + axisBorder: { show: false }, + axisTicks: { color: themeBorderColor }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + } +} +export const getRadialBarChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const radialBarColors = { + series1: '#fdd835', + series2: '#32baff', + series3: '#00d4bd', + series4: '#7367f0', + series5: '#FFA1A1', + } + + const { themeSecondaryTextColor, themePrimaryTextColor } = colorVariables(themeColors) + + return { + stroke: { lineCap: 'round' }, + labels: ['Comments', 'Replies', 'Shares'], + legend: { + show: true, + position: 'bottom', + labels: { + colors: themeSecondaryTextColor, + }, + markers: { + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + colors: [radialBarColors.series1, radialBarColors.series2, radialBarColors.series4], + plotOptions: { + radialBar: { + hollow: { size: '30%' }, + track: { + margin: 15, + background: themeColors.colors['grey-100'], + }, + dataLabels: { + name: { + fontSize: '2rem', + }, + value: { + fontSize: '1rem', + color: themeSecondaryTextColor, + }, + total: { + show: true, + fontWeight: 400, + label: 'Comments', + fontSize: '1.125rem', + + color: themePrimaryTextColor, + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + formatter(w: { globals: { seriesTotals: any[]; series: string | any[] } }) { + const totalValue + = w.globals.seriesTotals.reduce((a: number, b: number) => { + return a + b + }, 0) / w.globals.series.length + + if (totalValue % 1 === 0) + return `${totalValue}%` + else + return `${totalValue.toFixed(2)}%` + }, + }, + }, + }, + }, + grid: { + padding: { + top: -35, + bottom: -30, + }, + }, + } +} + +export const getDonutChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const donutColors = { + series1: '#fdd835', + series2: '#00d4bd', + series3: '#826bf8', + series4: '#32baff', + series5: '#ffa1a1', + } + + const { themeSecondaryTextColor, themePrimaryTextColor } = colorVariables(themeColors) + + return { + stroke: { width: 0 }, + labels: ['Operational', 'Networking', 'Hiring', 'R&D'], + colors: [donutColors.series1, donutColors.series5, donutColors.series3, donutColors.series2], + dataLabels: { + enabled: true, + formatter: (val: string) => `${parseInt(val, 10)}%`, + }, + legend: { + position: 'bottom', + markers: { offsetX: -3 }, + labels: { colors: themeSecondaryTextColor }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + plotOptions: { + pie: { + donut: { + labels: { + show: true, + name: { + fontSize: '1.5rem', + }, + value: { + fontSize: '1.5rem', + color: themeSecondaryTextColor, + formatter: (val: string) => `${parseInt(val, 10)}`, + }, + total: { + show: true, + fontSize: '1.5rem', + label: 'Operational', + formatter: () => '31%', + color: themePrimaryTextColor, + }, + }, + }, + }, + }, + responsive: [ + { + breakpoint: 992, + options: { + chart: { + height: 380, + }, + legend: { + position: 'bottom', + }, + }, + }, + { + breakpoint: 576, + options: { + chart: { + height: 320, + }, + plotOptions: { + pie: { + donut: { + labels: { + show: true, + name: { + fontSize: '1rem', + }, + value: { + fontSize: '1rem', + }, + total: { + fontSize: '1rem', + }, + }, + }, + }, + }, + }, + }, + ], + } +} + +export const getAreaChartSplineConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const areaColors = { + series3: '#e0cffe', + series2: '#b992fe', + series1: '#ab7efd', + } + + const { themeSecondaryTextColor, themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + }, + tooltip: { shared: false }, + dataLabels: { enabled: false }, + stroke: { + show: false, + curve: 'straight', + }, + legend: { + position: 'top', + horizontalAlign: 'left', + + labels: { colors: themeSecondaryTextColor }, + markers: { + offsetY: 1, + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + + colors: [areaColors.series3, areaColors.series2, areaColors.series1], + fill: { + opacity: 1, + type: 'solid', + }, + grid: { + show: true, + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + axisBorder: { show: false }, + + axisTicks: { color: themeBorderColor }, + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + categories: [ + '7/12', + '8/12', + '9/12', + '10/12', + '11/12', + '12/12', + '13/12', + '14/12', + '15/12', + '16/12', + '17/12', + '18/12', + '19/12', + ], + }, + } +} + +export const getColumnChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const columnColors = { + series1: 
'#826af9', + series2: '#d2b0ff', + bg: '#f8d3ff', + } + + const { themeSecondaryTextColor, themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + offsetX: -10, + stacked: true, + parentHeightOffset: 0, + toolbar: { show: false }, + }, + fill: { opacity: 1 }, + dataLabels: { enabled: false }, + + colors: [columnColors.series1, columnColors.series2], + legend: { + position: 'top', + horizontalAlign: 'left', + + labels: { colors: themeSecondaryTextColor }, + markers: { + offsetY: 1, + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + stroke: { + show: true, + colors: ['transparent'], + }, + plotOptions: { + bar: { + columnWidth: '15%', + colors: { + backgroundBarRadius: 10, + + backgroundBarColors: [columnColors.bg, columnColors.bg, columnColors.bg, columnColors.bg, columnColors.bg], + }, + }, + }, + grid: { + borderColor: themeBorderColor, + xaxis: { + lines: { show: true }, + }, + }, + yaxis: { + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + xaxis: { + axisBorder: { show: false }, + + axisTicks: { color: themeBorderColor }, + categories: ['7/12', '8/12', '9/12', '10/12', '11/12', '12/12', '13/12', '14/12', '15/12'], + crosshairs: { + stroke: { color: themeBorderColor }, + }, + labels: { + style: { colors: themeDisabledTextColor }, + }, + }, + responsive: [ + { + breakpoint: 600, + options: { + plotOptions: { + bar: { + columnWidth: '35%', + }, + }, + }, + }, + ], + } +} + +export const getHeatMapChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const { themeSecondaryTextColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + }, + dataLabels: { enabled: false }, + stroke: { + colors: [themeColors.colors.surface], + }, + legend: { + position: 'bottom', + labels: { + colors: themeSecondaryTextColor, + }, + markers: { + offsetY: 0, + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + plotOptions: { + heatmap: { + enableShades: false, + colorScale: { + ranges: [ + { to: 10, from: 0, name: '0-10', color: '#b9b3f8' }, + { to: 20, from: 11, name: '10-20', color: '#aba4f6' }, + { to: 30, from: 21, name: '20-30', color: '#9d95f5' }, + { to: 40, from: 31, name: '30-40', color: '#8f85f3' }, + { to: 50, from: 41, name: '40-50', color: '#8176f2' }, + { to: 60, from: 51, name: '50-60', color: '#7367f0' }, + ], + }, + }, + }, + grid: { + padding: { top: -20 }, + }, + yaxis: { + labels: { + style: { + colors: themeDisabledTextColor, + }, + }, + }, + xaxis: { + labels: { show: false }, + axisTicks: { show: false }, + axisBorder: { show: false }, + }, + } +} + +export const getRadarChartConfig = (themeColors: ThemeInstance['themes']['value']['colors']) => { + const radarColors = { + series1: '#9b88fa', + series2: '#ffa1a1', + } + + const { themeSecondaryTextColor, themeBorderColor, themeDisabledTextColor } = colorVariables(themeColors) + + return { + chart: { + parentHeightOffset: 0, + toolbar: { show: false }, + dropShadow: { + top: 1, + blur: 8, + left: 1, + opacity: 0.2, + enabled: false, + }, + }, + markers: { size: 0 }, + fill: { opacity: [1, 0.8] }, + colors: [radarColors.series1, radarColors.series2], + stroke: { + width: 0, + show: false, + }, + legend: { + labels: { + colors: themeSecondaryTextColor, + }, + markers: { + offsetX: -3, + }, + itemMargin: { + vertical: 3, + horizontal: 10, + }, + }, + plotOptions: { + radar: { + polygons: { + strokeColors: 
themeBorderColor, + connectorColors: themeBorderColor, + }, + }, + }, + grid: { + show: false, + padding: { + top: -20, + bottom: -20, + }, + }, + yaxis: { show: false }, + xaxis: { + categories: ['Battery', 'Brand', 'Camera', 'Memory', 'Storage', 'Display', 'OS', 'Price'], + labels: { + style: { + colors: [ + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + themeDisabledTextColor, + ], + }, + }, + }, + } +} diff --git a/packages/dashboard/src/layouts/components/Breadcrumbs.vue b/packages/dashboard/src/layouts/components/Breadcrumbs.vue new file mode 100644 index 00000000..2d328044 --- /dev/null +++ b/packages/dashboard/src/layouts/components/Breadcrumbs.vue @@ -0,0 +1,25 @@ + + \ No newline at end of file diff --git a/packages/dashboard/src/layouts/components/DefaultLayout.vue b/packages/dashboard/src/layouts/components/DefaultLayout.vue index ac99dff9..b10ac042 100644 --- a/packages/dashboard/src/layouts/components/DefaultLayout.vue +++ b/packages/dashboard/src/layouts/components/DefaultLayout.vue @@ -14,6 +14,7 @@ import NavBarI18n from './NavBarI18n.vue' import NavSearchBar from './NavSearchBar.vue' import NavBarNotifications from './NavBarNotifications.vue' import TheCustomizer from '@/plugins/vuetify/@core/components/TheCustomizer.vue' +import Breadcrumbs from './Breadcrumbs.vue' const { appRouteTransition, isLessThanOverlayNavBreakpoint, isVerticalNavCollapsed } = useThemeConfig() const { width: windowWidth } = useWindowSize() diff --git a/packages/dashboard/src/libs/client.ts b/packages/dashboard/src/libs/client.ts new file mode 100644 index 00000000..6b5361fb --- /dev/null +++ b/packages/dashboard/src/libs/client.ts @@ -0,0 +1,71 @@ +import { LCDQueryClient as BankRestClient} from '@ping-pub/codegen/src/cosmos/bank/v1beta1/query.lcd' +import { LCDQueryClient as BaseRestClient} from '@ping-pub/codegen/src/cosmos/base/tendermint/v1beta1/query.lcd' +import { LCDQueryClient as MintRestClient} from '@ping-pub/codegen/src/cosmos/mint/v1beta1/query.lcd' +import { LCDQueryClient as DistributionRestClient} from '@ping-pub/codegen/src/cosmos/distribution/v1beta1/query.lcd' +import { LCDQueryClient as GovRestClient} from '@ping-pub/codegen/src/cosmos/gov/v1/query.lcd' +import { LCDQueryClient as StakingRestClient} from '@ping-pub/codegen/src/cosmos/staking/v1beta1/query.lcd' +import { LCDQueryClient as SlashingRestClient} from '@ping-pub/codegen/src/cosmos/slashing/v1beta1/query.lcd' +import type { LCDClient } from '@osmonauts/lcd' + +const mint: Record = {} + +function createMintClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createMintClientForChain(chainName: string, lcd: LCDClient) { + return createMintClient(mint[chainName] || MintRestClient, lcd) +} + +const bank: Record = {} + +function createBankClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createBankClientForChain(chainName: string, lcd: LCDClient) { + return createBankClient(bank[chainName] || BankRestClient, lcd) +} + +const base: Record = {} + +function createBaseClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createBaseClientForChain(chainName: string, lcd: LCDClient) { + return createBaseClient(base[chainName] || BaseRestClient, lcd) +} + +const distribution: Record = {} 
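// Illustrative usage sketch (assumed for clarity, not part of this patch): the
// factories in this file all follow the same shape, a per-module registry keyed
// by chain name plus a small helper that constructs the module's LCD query client
// around an @osmonauts/lcd LCDClient, so a REST client for one chain can be built
// roughly like this (the endpoint URL and chain name below are hypothetical):
//
//   import { LCDClient } from '@osmonauts/lcd'
//   import { createBankClientForChain } from '@/libs/client'
//
//   const lcd = new LCDClient({ restEndpoint: 'https://rest.example.com' })
//   const bank = createBankClientForChain('cosmoshub', lcd)
//   bank.totalSupply({}).then(res => console.log(res.supply))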
+ +function createDistributionClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createDistributionClientForChain(chainName: string, lcd: LCDClient) { + return createDistributionClient(distribution[chainName] || DistributionRestClient, lcd) +} + +const gov: Record = {} + +function createGovClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createGovRestClientForChain(chainName: string, lcd: LCDClient) { + return createGovClient(gov[chainName] || GovRestClient, lcd) +} + +const staking: Record = {} + +function createStakingClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createStakingRestClientForChain(chainName: string, lcd: LCDClient) { + return createStakingClient(staking[chainName] || StakingRestClient, lcd) +} + +const slasing: Record = {} + +function createSlashingClient(c: new (rest: { requestClient: B}) => A, requestClient: B): A { + return new c({requestClient}); +} +export function createSlashingRestClientForChain(chainName: string, lcd: LCDClient) { + return createSlashingClient(slasing[chainName] || SlashingRestClient, lcd) +} diff --git a/packages/dashboard/src/libs/extensions/OsmosMintExtention.ts b/packages/dashboard/src/libs/extensions/OsmosMintExtention.ts new file mode 100644 index 00000000..c5723062 --- /dev/null +++ b/packages/dashboard/src/libs/extensions/OsmosMintExtention.ts @@ -0,0 +1,38 @@ +import type { LCDClient } from "@osmonauts/lcd"; +import type { QueryAnnualProvisionsRequest, QueryAnnualProvisionsResponseSDKType, QueryInflationRequest, QueryInflationResponseSDKType, QueryParamsRequest, QueryParamsResponseSDKType } from '@ping-pub/codegen/src/cosmos/mint/v1beta1/query' + +export class OsmosisMintClient { + req: LCDClient; + + constructor({ + requestClient + }: { + requestClient: LCDClient; + }) { + this.req = requestClient; + this.params = this.params.bind(this); + this.inflation = this.inflation.bind(this); + this.annualProvisions = this.annualProvisions.bind(this); + } + /* Params returns the total set of minting parameters. */ + + + async params(_params: QueryParamsRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/params`; + return await this.req.get(endpoint); + } + /* Inflation returns the current minting inflation value. */ + + + async inflation(_params: QueryInflationRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/inflation`; + return await this.req.get(endpoint); + } + /* AnnualProvisions current minting annual provisions value. 
*/ + + + async annualProvisions(_params: QueryAnnualProvisionsRequest = {}): Promise { + const endpoint = `cosmos/mint/v1beta1/annual_provisions`; + return await this.req.get(endpoint); + } +} \ No newline at end of file diff --git a/packages/dashboard/src/modules/[chain]/block.ts b/packages/dashboard/src/modules/[chain]/block.ts new file mode 100644 index 00000000..4edfd9a9 --- /dev/null +++ b/packages/dashboard/src/modules/[chain]/block.ts @@ -0,0 +1,15 @@ +import { defineStore } from "pinia"; + +export const useBlockModule = defineStore('blockModule', { + state: () => { + return { + latest: {}, + recents: [] + } + }, + getters: { + + }, + actions: { + } +}) \ No newline at end of file diff --git a/packages/dashboard/src/modules/[chain]/index.vue b/packages/dashboard/src/modules/[chain]/index.vue index 5cc5fae5..203e8831 100644 --- a/packages/dashboard/src/modules/[chain]/index.vue +++ b/packages/dashboard/src/modules/[chain]/index.vue @@ -1,32 +1,126 @@ \ No newline at end of file diff --git a/packages/dashboard/src/modules/[chain]/store.ts b/packages/dashboard/src/modules/[chain]/store.ts new file mode 100644 index 00000000..bbdcf3b3 --- /dev/null +++ b/packages/dashboard/src/modules/[chain]/store.ts @@ -0,0 +1,130 @@ +import { useBlockchain, useCoingecko } from "@/stores"; +import numeral from "numeral"; +import { defineStore } from "pinia"; + +function colorMap(color: string) { + switch (color) { + case 'yellow': + return 'warning' + case 'green': + return 'success' + default: + return 'secondary' + } +} + +export const useIndexModule = defineStore('module-index', { + state: () => { + return { + days: 14, + tickerIndex: 0, + coinInfo: { + name: '', + symbol: '', + description: { + en: '' + }, + market_cap_rank: 0, + links: { + twitter_screen_name: '', + homepage: [] as string[], + repos_url: { + github: [] + }, + telegram_channel_identifier: '' + }, + market_data: { + price_change_percentage_24h: 0 + }, + tickers: [] as { + market: { + name: string, + identifier: string, + }, + coin_id: string, + target_coin_id: string, + trust_score: string, + trade_url: string, + converted_last: { + btc: number, + eth: number, + usd: number, + }, + base: string, + target: string, + }[] + }, + marketData: { + market_caps: [], + prices: [] as number[], + total_volumes: [] as number[], + } + } + }, + getters: { + blockchain() { + const chain = useBlockchain() + return chain.current + }, + coingecko() { + const store = useCoingecko() + return store + }, + twitter() : string { + return `https://twitter.com/${this.coinInfo.links.twitter_screen_name}` + }, + homepage(): string { + const [page1, page2, page3] = this.coinInfo.links?.homepage + return page1 || page2 || page3 + }, + github(): string { + const [page1, page2, page3] = this.coinInfo.links?.repos_url?.github + return page1 || page2 || page3 + }, + telegram() : string { + return `https://t.me/${this.coinInfo.links.telegram_channel_identifier}` + }, + + priceChange(): string { + const change = this.coinInfo.market_data?.price_change_percentage_24h || 0 + console.log(change, 'change') + return numeral(change).format('+0.[00]') + }, + + priceColor() : string { + const change = this.coinInfo.market_data?.price_change_percentage_24h || 0 + switch (true) { + case change > 0: + return 'text-success' + case change < 0: + return 'text-error' + default: + return '' + } + }, + trustColor() : string { + const change = this.coinInfo.tickers[this.tickerIndex]?.trust_score + return colorMap(change) + }, + }, + actions: { + tickerColor(color: string) { + return 
colorMap(color) + }, + initCoingecko() { + this.tickerIndex = 0 + const [firstAsset] = this.blockchain?.assets + if (firstAsset && firstAsset.coingecko_id) { + this.coingecko.getCoinInfo(firstAsset.coingecko_id).then(x => { + this.coinInfo = x + }) + this.coingecko.getMarketChart(this.days, firstAsset.coingecko_id).then(x => { + this.marketData = x + }) + } + }, + selectTicker(i: number) { + this.tickerIndex = i + } + } +}) \ No newline at end of file diff --git a/packages/dashboard/src/modules/wallet/test.vue b/packages/dashboard/src/modules/wallet/test.vue new file mode 100644 index 00000000..9a8bf65b --- /dev/null +++ b/packages/dashboard/src/modules/wallet/test.vue @@ -0,0 +1,3 @@ + \ No newline at end of file diff --git a/packages/dashboard/src/pages/index.vue b/packages/dashboard/src/pages/index.vue index d9f6629b..1378120d 100644 --- a/packages/dashboard/src/pages/index.vue +++ b/packages/dashboard/src/pages/index.vue @@ -2,6 +2,7 @@ import { useDashboard, LoadingStatus, type ChainConfig } from '@/stores/useDashboard'; import ChainSummary from '@/components/ChainSummary.vue'; import { computed, ref } from 'vue'; +import { useBlockchain } from '@/stores'; const dashboard = useDashboard() @@ -16,6 +17,7 @@ const chains = computed(()=> { return Object.values(dashboard.chains) } }) +const chain = useBlockchain() diff --git a/packages/dashboard/src/router/index.ts b/packages/dashboard/src/router/index.ts index f8e75086..645b8cef 100644 --- a/packages/dashboard/src/router/index.ts +++ b/packages/dashboard/src/router/index.ts @@ -14,6 +14,7 @@ const router = createRouter({ //update current blockchain router.beforeEach((to) => { const { chain } = to.params + console.log('chain', chain) if(chain){ const dashboard = useDashboard() dashboard.setCurrentChain(chain.toString()) diff --git a/packages/dashboard/src/stores/counter.ts b/packages/dashboard/src/stores/counter.ts deleted file mode 100644 index 374b4d03..00000000 --- a/packages/dashboard/src/stores/counter.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { ref, computed } from "vue"; -import { defineStore } from "pinia"; - -export const useCounterStore = defineStore("counter", () => { - const count = ref(0); - const doubleCount = computed(() => count.value * 2); - function increment() { - count.value++; - } - - return { count, doubleCount, increment }; -}); diff --git a/packages/dashboard/src/stores/index.ts b/packages/dashboard/src/stores/index.ts new file mode 100644 index 00000000..ed1a543d --- /dev/null +++ b/packages/dashboard/src/stores/index.ts @@ -0,0 +1,4 @@ +export * from './useBankStore' +export * from './useBlockchain' +export * from './useCoinGecko' +export * from './useDashboard' \ No newline at end of file diff --git a/packages/dashboard/src/stores/option.ts b/packages/dashboard/src/stores/option.ts deleted file mode 100644 index e69de29b..00000000 diff --git a/packages/dashboard/src/stores/template.ts b/packages/dashboard/src/stores/template.ts new file mode 100644 index 00000000..136e0905 --- /dev/null +++ b/packages/dashboard/src/stores/template.ts @@ -0,0 +1,13 @@ +import { defineStore } from "pinia"; + +export const useStoreName = defineStore('bankstore', { + state: () => { + return { + } + }, + getters: { + + }, + actions: { + } +}) \ No newline at end of file diff --git a/packages/dashboard/src/stores/useBank.ts b/packages/dashboard/src/stores/useBank.ts deleted file mode 100644 index d243fd55..00000000 --- a/packages/dashboard/src/stores/useBank.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { ref, computed, type ComputedRef } 
from "vue"; -import { defineStore } from "pinia"; -import { useBlockchain } from "./useBlockchain"; - -import { osmosis } from 'osmojs'; -import { LCDClient } from '@osmonauts/lcd' -import { LCDQueryClient } from 'osmojs/main/codegen/cosmos/bank/v1beta1/query.lcd.js' -import type { QueryTotalSupplyRequest, QueryTotalSupplyResponseSDKType } from "osmojs/types/codegen/cosmos/bank/v1beta1/query"; - -export const useBank = defineStore("usebank", () => { - const totalSupply = ref({} as QueryTotalSupplyResponseSDKType); - - const client = computed(() => { - const blockchain = useBlockchain() - return new LCDQueryClient(new LCDClient({restEndpoint: blockchain.availableEndpoint})) -}) - - async function fetchTotalSupply({}) { - console.log(client) - totalSupply.value = await client.value.totalSupply({}) - return totalSupply.value; - } - - return { totalSupply, fetchTotalSupply }; -}); diff --git a/packages/dashboard/src/stores/useBankStore.ts b/packages/dashboard/src/stores/useBankStore.ts index 38536a60..7653681f 100644 --- a/packages/dashboard/src/stores/useBankStore.ts +++ b/packages/dashboard/src/stores/useBankStore.ts @@ -1,8 +1,4 @@ -import { ref, computed } from "vue"; import { defineStore } from "pinia"; -import { osmosis } from 'osmojs'; - -import { LCDQueryClient } from 'osmojs/main/codegen/cosmos/bank/v1beta1/query.lcd.js' import type { QuerySupplyOfRequest, QueryTotalSupplyRequest, QueryBalanceRequest,QueryAllBalancesRequest, QueryAllBalancesResponseSDKType, QueryBalanceResponseSDKType, QueryTotalSupplyResponseSDKType } from "osmojs/types/codegen/cosmos/bank/v1beta1/query"; @@ -16,41 +12,37 @@ export const useBankStore = defineStore('bankstore', { totalSupply: {supply: []} as QueryTotalSupplyResponseSDKType, } }, - getters: { - lcdClient() : LCDQueryClient { - const requestClient = useBlockchain().restClient - return new LCDQueryClient( { requestClient }) - }, - }, - actions: { - cacheBalance(address: string, balances: CoinSDKType[]) { - if(this.balances[address]) { - this.balances[address] = [...this.balances[address], ... balances] - }else { - this.balances[address] = balances - } - }, - async fetchBalance(param: QueryBalanceRequest) : Promise { - const response : QueryBalanceResponseSDKType = await this.lcdClient.balance(param) - if (response.balance) this.cacheBalance(param.address, [response.balance]) - return response - }, - async fetchAllBalance(param: QueryAllBalancesRequest) : Promise { - const response : QueryAllBalancesResponseSDKType = await this.lcdClient.balance(param) - if (response.balances) this.cacheBalance(param.address, response.balances) - return response - }, - async fetchTotalSupply(param: QueryTotalSupplyRequest): Promise { - const response = await this.lcdClient.totalSupply(param) - this.totalSupply.supply = [...this.totalSupply.supply, ...response.supply] - this.totalSupply.pagination = response.pagination - return response - }, - async fetchSupply(param: QuerySupplyOfRequest) { - const c: LCDQueryClient = new LCDQueryClient( { requestClient: {} }) - return ''; - - } - } + // getters: { + // lcdClient() { + // return new client.bank.LCDQueryClient( { + // requestClient: useBlockchain().restClient + // }) + // }, + // }, + // actions: { + // cacheBalance(address: string, balances: CoinSDKType[]) { + // if(this.balances[address]) { + // this.balances[address] = [...this.balances[address], ... 
+    //         }else {
+    //             this.balances[address] = balances
+    //         }
+    //     },
+    //     async fetchBalance(param: QueryBalanceRequest) : Promise {
+    //         const response : QueryBalanceResponseSDKType = await this.lcdClient.balance(param)
+    //         if (response.balance) this.cacheBalance(param.address, [response.balance])
+    //         return response
+    //     },
+    //     async fetchAllBalance(param: QueryAllBalancesRequest) : Promise {
+    //         const response : QueryAllBalancesesponseSDKType = await this.lcdClient.allBalances(param)
+    //         if (response.balances) this.cacheBalance(param.address, response.balances)
+    //         return response
+    //     },
+    //     async fetchTotalSupply(param: QueryTotalSupplyRequest): Promise {
+    //         const response = await this.lcdClient.totalSupply(param)
+    //         this.totalSupply.supply = [...this.totalSupply.supply, ...response.supply]
+    //         this.totalSupply.pagination = response.pagination
+    //         return response
+    //     },
+    // }
 })
diff --git a/packages/dashboard/src/stores/useBlockchain.ts b/packages/dashboard/src/stores/useBlockchain.ts
index 653f1369..e6592674 100644
--- a/packages/dashboard/src/stores/useBlockchain.ts
+++ b/packages/dashboard/src/stores/useBlockchain.ts
@@ -1,62 +1,59 @@
 import { ref, computed } from "vue";
 import { defineStore } from "pinia";
-import { getLogo, useDashboard } from "./useDashboard";
+import { getLogo, useDashboard, type ChainConfig } from "./useDashboard";
 import { useTheme } from 'vuetify'
-import { osmosis, } from 'osmojs';
 import { LCDClient } from '@osmonauts/lcd'
+import { cosmos } from '@ping-pub/codegen'
+import { createBaseClientForChain } from "@/libs/client";
-export const useBlockchain = defineStore("blockchain", () => {
-    const dbstore = useDashboard()
-
-    const rest = ref('')
-    const status = ref({} as Record)
-
-    const current = computed(()=>{
-        return dbstore.getCurrentChain()
-    });
-    const logo = computed(() => {
-        return current.value?.logo
-    })
-    const name = computed(() => {
-        return current.value.chainName
-    })
-
-    const primaryColor = computed(() => {
-        const colors = ['#fff', '#fea', '#123', '#68f', '#aca', 'bbe', '#666CFF']
-        const color = colors[Math.floor(Math.random() * colors.length)]
-
-        const vuetifyTheme = useTheme()
-        const currentThemeName = vuetifyTheme.name.value
-        vuetifyTheme.themes.value[currentThemeName].colors.primary = color
-        return color
-    })
-    const availableEndpoint = computed(() => {
-        const all = current.value?.endpoints?.rest
-        if(all) {
-            if(!rest.value || all.findIndex(x => x.address === rest.value) < 0) {
-                const rn = Math.random()
-                const endpoint = all[Math.floor(rn * all.length)]
-                rest.value = endpoint?.address || ''
-            }
+export const useBlockchain = defineStore("blockchain", {
+    state: () => {
+        return {
+            status: {} as Record,
+            rest: ''
+        }
+    },
+    getters: {
+        current() {
+            return useDashboard().getCurrentChain()
+        },
+        logo() {
+            return this.current?.logo || ''
+        },
+        name() {
+            return this.current?.chainName || ''
+        },
+        primaryColor() {
+            const color = this.current.themeColor || '#666CFF'
+            const vuetifyTheme = useTheme()
+            const currentThemeName = vuetifyTheme.name.value
+            vuetifyTheme.themes.value[currentThemeName].colors.primary = color
+            return color
+        },
+        availableEndpoint() : string {
+            const all = this.current?.endpoints?.rest
+            if(all) {
+                if(!this.rest || all.findIndex(x => x.address === this.rest) < 0) {
+                    const rn = Math.random()
+                    const endpoint = all[Math.floor(rn * all.length)]
+                    this.rest = endpoint?.address || ''
+                }
+            }
+            return this.rest
+        },
+        restClient() {
+            return new LCDClient({restEndpoint: this.availableEndpoint})
+        },
+    },
+    actions: {
+        setRestEndpoint(endpoint: string) {
+            this.rest = endpoint
+        },
+        calltest() {
+            console.log('call test')
+            const base = createBaseClientForChain(this.current.chainName, new LCDClient({restEndpoint: 'https://api.evmos.nodestake.top/'}))
+            console.log('base: ', base)
+            base.getLatestBlock().then(x => console.log(x))
        }
-
-    return rest.value
-    })
-
-    const restClient = computed(()=> {
-        return new LCDClient({restEndpoint: availableEndpoint.value})
-    })
-
-    function setRestEndpoint(endpoint: string) {
-        rest.value = endpoint
-    }
-
-    return {
-        // states
-        availableEndpoint,
-        // getters
-        name, current, logo, primaryColor, restClient,
-        // actions
-        setRestEndpoint
-    };
-});
+})
diff --git a/packages/dashboard/src/stores/useCoinGecko.ts b/packages/dashboard/src/stores/useCoinGecko.ts
new file mode 100644
index 00000000..10b56473
--- /dev/null
+++ b/packages/dashboard/src/stores/useCoinGecko.ts
@@ -0,0 +1,49 @@
+import { defineStore } from "pinia";
+import { get } from '../libs/http'
+import type { LoadingStatus } from "./useDashboard";
+
+export interface PriceMeta {
+  usd?: string,
+  usd_24h_change?: string,
+  cny?: string,
+  cny_24h_change? : string,
+  eur?: string,
+  eur_24h_change?: string,
+}
+
+const LocalStoreKey = 'currency'
+
+export const useCoingecko = defineStore('coingecko', {
+  state: () => {
+    const currency = localStorage.getItem(LocalStoreKey)
+    return {
+      currency, // secondary currency
+      loadStatus: {} as Record,
+      prices: {} as Record,
+      marketChart: {}
+    }
+  },
+  getters: {
+
+  },
+
+  actions: {
+    getMarketChart(days = 30, coinId = 'cosmos') {
+      return get(`https://api.coingecko.com/api/v3/coins/${coinId}/market_chart?vs_currency=usd&days=${days}`)
+    },
+
+    fetchCoinPrice(ids: string[]) {
+      const url = `https://api.coingecko.com/api/v3/simple/price?include_24hr_change=true&vs_currencies=${['usd', this.currency].join(',')}&ids=${ids.join(',')}`
+      get(url).then(data => {
+        this.prices = {...this.prices, ...data}
+      })
+    },
+    getCoinInfo(coinId: string) {
+      return get(`https://api.coingecko.com/api/v3/coins/${coinId}`)
+    },
+    setSecondaryCurrency(currency: string) {
+      localStorage.setItem(LocalStoreKey, currency)
+      this.currency = currency
+    }
+  }
+})
\ No newline at end of file
diff --git a/packages/dashboard/src/stores/useDashboard.ts b/packages/dashboard/src/stores/useDashboard.ts
index 8355bd3d..7c08e7e2 100644
--- a/packages/dashboard/src/stores/useDashboard.ts
+++ b/packages/dashboard/src/stores/useDashboard.ts
@@ -60,6 +60,7 @@ export interface ChainConfig {
     bech32Prefix: string,
     chainId: string,
     assets: Asset[],
+    themeColor?: string,
     endpoints: {
         rest?: Endpoint[]
         rpc?: Endpoint[]
@@ -239,7 +240,7 @@ export const useDashboard = defineStore("dashboard", () => {
         }
     }
     function getCurrentChain() {
-        return chains.value[current.value]
+        return chains.value[current.value] || Object.values(chains.value)[0]
     }
     function setConfigSource(newSource: ConfigSource) {
         source.value = newSource
diff --git a/packages/dashboard/src/stores/useEndpoint.ts b/packages/dashboard/src/stores/useEndpoint.ts
new file mode 100644
index 00000000..07c7b9d1
--- /dev/null
+++ b/packages/dashboard/src/stores/useEndpoint.ts
@@ -0,0 +1,15 @@
+import { defineStore } from "pinia";
+import type { LCDClient } from '@osmonauts/lcd';
+
+export const useEndpoint = defineStore('pinia.endpoint', {
+  state: () => {
+    return {
+      restClient: {} as LCDClient,
+    }
+  },
+  actions: {
+    setRestClient(client: LCDClient) {
+      this.restClient = client
+    }
+  }
+})
\ No newline
at end of file diff --git a/packages/dashboard/vite.config.ts b/packages/dashboard/vite.config.ts index 89c9b300..9e3b281b 100644 --- a/packages/dashboard/vite.config.ts +++ b/packages/dashboard/vite.config.ts @@ -24,6 +24,7 @@ export default defineConfig({ }), Pages({ dirs: ["./src/modules", "./src/pages", ], + exclude: ['**/*.ts'], // only load .vue as modules }), Layouts({ layoutsDirs: "./src/layouts/", diff --git a/yarn.lock b/yarn.lock index 72f997b1..0b80be45 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,7 +2,7 @@ # yarn lockfile v1 -"@ampproject/remapping@^2.1.0": +"@ampproject/remapping@^2.1.0", "@ampproject/remapping@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== @@ -22,11 +22,79 @@ dependencies: "@babel/highlight" "^7.18.6" +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.21.0.tgz#c241dc454e5b5917e40d37e525e2f4530c399298" + integrity sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g== + "@babel/compat-data@^7.20.5": version "7.20.14" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.14.tgz#4106fc8b755f3e3ee0a0a7c27dde5de1d2b2baf8" integrity sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw== +"@babel/core@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.10.tgz#39ad504991d77f1f3da91be0b8b949a5bc466fb8" + integrity sha512-JQM6k6ENcBFKVtWvLavlvi/mPcpYZ3+R+2EySDEMSMbp7Mn4FexlbbJVrx2R7Ijhr01T8gyqrOaABWIOgxeUyw== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-module-transforms" "^7.18.9" + "@babel/helpers" "^7.18.9" + "@babel/parser" "^7.18.10" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.18.10" + "@babel/types" "^7.18.10" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/core@7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" + integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.3" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.3" + "@babel/types" "^7.19.3" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.21.0.tgz#1341aefdcc14ccc7553fcc688dd8986a2daffc13" + integrity sha512-PuxUbxcW6ZYe656yL3EAhpy7qXKq0DmYsrJLpbB8XrsCP9Nm+XCg9XFMb5vIDliPD7+U/+M+QJlH17XOcB7eXA== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.21.0" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-module-transforms" "^7.21.0" + "@babel/helpers" 
"^7.21.0" + "@babel/parser" "^7.21.0" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.0" + "@babel/types" "^7.21.0" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.2" + semver "^6.3.0" + "@babel/core@^7.20.12", "@babel/core@^7.20.5": version "7.20.12" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.20.12.tgz#7930db57443c6714ad216953d1356dac0eb8496d" @@ -48,6 +116,34 @@ json5 "^2.2.2" semver "^6.3.0" +"@babel/generator@7.18.12": + version "7.18.12" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.12.tgz#fa58daa303757bd6f5e4bbca91b342040463d9f4" + integrity sha512-dfQ8ebCN98SvyL7IxNMCUtZQSq5R7kxgN+r8qYTGDmmSion1hX2C0zq2yo1bsCDhXixokv1SAWTZUMYbO/V5zg== + dependencies: + "@babel/types" "^7.18.10" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/generator@7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" + integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== + dependencies: + "@babel/types" "^7.19.3" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/generator@^7.18.10", "@babel/generator@^7.19.3", "@babel/generator@^7.21.0", "@babel/generator@^7.21.1": + version "7.21.1" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.1.tgz#951cc626057bc0af2c35cd23e9c64d384dea83dd" + integrity sha512-1lT45bAYlQhFn/BHivJs43AiW2rg3/UbLyShGfF3C0KmHvO5fSghWd5kBJy30kpRRucGzXStvnnCFniCR2kXAA== + dependencies: + "@babel/types" "^7.21.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/generator@^7.20.7": version "7.20.14" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.14.tgz#9fa772c9f86a46c6ac9b321039400712b96f64ce" @@ -64,7 +160,15 @@ dependencies: "@babel/types" "^7.18.6" -"@babel/helper-compilation-targets@^7.20.7": +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.3", "@babel/helper-compilation-targets@^7.20.7": version "7.20.7" resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz#a6cd33e93629f5eb473b021aac05df62c4cd09bb" integrity sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ== @@ -75,6 +179,20 @@ lru-cache "^5.1.1" semver "^6.3.0" +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.21.0.tgz#64f49ecb0020532f19b1d014b03bccaa1ab85fb9" + integrity sha512-Q8wNiMIdwsv5la5SPxNYzzkPnjgC0Sy0i7jLkVOCdllu/xcVNkr3TeZzbHBJrj+XXRqzX5uCyCoV9eu6xUG7KQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + 
"@babel/helper-function-name" "^7.21.0" + "@babel/helper-member-expression-to-functions" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-create-class-features-plugin@^7.20.12": version "7.20.12" resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.20.12.tgz#4349b928e79be05ed2d1643b20b99bb87c503819" @@ -89,11 +207,46 @@ "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" "@babel/helper-split-export-declaration" "^7.18.6" +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.20.5": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.21.0.tgz#53ff78472e5ce10a52664272a239787107603ebb" + integrity sha512-N+LaFW/auRSWdx7SHD/HiARwXQju1vXTW4fKr4u5SgBUTm51OKEjKgj+cs00ggW3kEvNqwErnlwuq7Y3xBe4eg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.3.1" + +"@babel/helper-define-polyfill-provider@^0.3.2", "@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + "@babel/helper-environment-visitor@^7.18.9": version "7.18.9" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== +"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz#d552829b10ea9f120969304023cd0645fa00b1b4" + integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== + dependencies: + "@babel/template" "^7.20.7" + "@babel/types" "^7.21.0" + "@babel/helper-function-name@^7.19.0": version "7.19.0" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" @@ -116,6 +269,13 @@ dependencies: "@babel/types" "^7.20.7" +"@babel/helper-member-expression-to-functions@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz#319c6a940431a133897148515877d2f3269c3ba5" + integrity sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q== 
+ dependencies: + "@babel/types" "^7.21.0" + "@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" @@ -123,6 +283,20 @@ dependencies: "@babel/types" "^7.18.6" +"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.18.9", "@babel/helper-module-transforms@^7.19.0", "@babel/helper-module-transforms@^7.21.0", "@babel/helper-module-transforms@^7.21.2": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz#160caafa4978ac8c00ac66636cb0fa37b024e2d2" + integrity sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.2" + "@babel/types" "^7.21.2" + "@babel/helper-module-transforms@^7.20.11": version "7.20.11" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.20.11.tgz#df4c7af713c557938c50ea3ad0117a7944b2f1b0" @@ -144,12 +318,22 @@ dependencies: "@babel/types" "^7.18.6" -"@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2": +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.20.2" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz#d1b9000752b18d0877cff85a5c376ce5c3121629" integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== -"@babel/helper-replace-supers@^7.20.7": +"@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.20.7": version "7.20.7" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz#243ecd2724d2071532b2c8ad2f0f9f083bcae331" integrity sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A== @@ -182,7 +366,7 @@ dependencies: "@babel/types" "^7.18.6" -"@babel/helper-string-parser@^7.19.4": +"@babel/helper-string-parser@^7.18.10", "@babel/helper-string-parser@^7.19.4": version "7.19.4" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" integrity 
sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== @@ -197,6 +381,30 @@ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== +"@babel/helper-validator-option@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz#8224c7e13ace4bafdc4004da2cf064ef42673180" + integrity sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ== + +"@babel/helper-wrap-function@^7.18.9": + version "7.20.5" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.20.5.tgz#75e2d84d499a0ab3b31c33bcfe59d6b8a45f62e3" + integrity sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.5" + "@babel/types" "^7.20.5" + +"@babel/helpers@^7.18.9", "@babel/helpers@^7.19.0", "@babel/helpers@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.21.0.tgz#9dd184fb5599862037917cdc9eecb84577dc4e7e" + integrity sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA== + dependencies: + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.0" + "@babel/types" "^7.21.0" + "@babel/helpers@^7.20.7": version "7.20.13" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.20.13.tgz#e3cb731fb70dc5337134cadc24cbbad31cc87ad2" @@ -215,11 +423,241 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/parser@7.18.11": + version "7.18.11" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.11.tgz#68bb07ab3d380affa9a3f96728df07969645d2d9" + integrity sha512-9JKn5vN+hDt0Hdqn1PiJ2guflwP+B6Ga8qbDuoF0PzzVhrzsKIJo8yGqVk6CmMHiMei9w1C1Bp9IMJSIK+HPIQ== + +"@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.18.11", "@babel/parser@^7.19.3", "@babel/parser@^7.21.0", "@babel/parser@^7.21.2": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.2.tgz#dacafadfc6d7654c3051a66d6fe55b6cb2f2a0b3" + integrity sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ== + "@babel/parser@^7.16.4", "@babel/parser@^7.20.13", "@babel/parser@^7.20.5", "@babel/parser@^7.20.7": version "7.20.15" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.15.tgz#eec9f36d8eaf0948bb88c87a46784b5ee9fd0c89" integrity sha512-DI4a1oZuf8wC+oAJA9RW6ga3Zbe8RZFt7kD9i4qAspz3I/yHet1VvC3DiSy/fsUvv5pvJuNPh0LPOdCcqinDPg== +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.20.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.20.7.tgz#d9c85589258539a22a901033853101a6198d4ef1" + integrity sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/plugin-proposal-optional-chaining" "^7.20.7" + +"@babel/plugin-proposal-async-generator-functions@^7.18.10", "@babel/plugin-proposal-async-generator-functions@^7.19.1": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.7.tgz#bfb7276d2d573cb67ba379984a2334e262ba5326" + integrity sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@7.18.6", "@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.21.0.tgz#77bdd66fb7b605f3a61302d224bdfacf5547977d" + integrity sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-default-from@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-default-from/-/plugin-proposal-export-default-from-7.18.10.tgz#091f4794dbce4027c03cf4ebc64d3fb96b75c206" + integrity sha512-5H2N3R2aQFxkV4PIBUR/i7PUSwgTZjouJKzI8eKswfIjT0PhvzkPn0t0wIS5zn6maQuvtT0t1oHtMUz61LOuow== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-default-from" "^7.18.6" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + 
"@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.20.7.tgz#dfbcaa8f7b4d37b51e8bfb46d94a5aea2bb89d83" + integrity sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" + integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== + dependencies: + "@babel/compat-data" "^7.20.5" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.20.7" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + 
"@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.18.9", "@babel/plugin-proposal-optional-chaining@^7.20.7": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" + integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0.tgz#19496bd9883dd83c23c7d7fc45dcd9ad02dfa1dc" + integrity sha512-ha4zfehbJjc5MmXBlHec1igel5TJXXLDDRbuJ4+XT2TJcyD9/V1919BA8gMvsdHcNMBy4WBUBiRb3nw/EQUtBw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + 
resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-default-from@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-default-from/-/plugin-syntax-export-default-from-7.18.6.tgz#8df076711a4818c4ce4f23e61d622b0ba2ff84bc" + integrity sha512-Kr//z3ujSVNx6E9z9ih5xXXMqK07VVTuqPmqGe6Mss/zW5XPeLZeSDZoP9ab/hT4wPKqAgjl2PnhPrcpk8Seew== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz#bb50e0d4bea0957235390641209394e87bdb9cc4" + integrity sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + "@babel/plugin-syntax-jsx@^7.0.0": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" @@ -227,6 +665,62 @@ dependencies: "@babel/helper-plugin-utils" "^7.18.6" +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-typescript@^7.20.0": version "7.20.0" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz#4e9a0cfc769c85689b77a2e642d24e9f697fc8c7" @@ -234,6 +728,266 @@ dependencies: "@babel/helper-plugin-utils" "^7.19.0" +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz#bea332b0e8b2dab3dafe55a163d8227531ab0551" + integrity sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.20.7.tgz#dfee18623c8cb31deb796aa3ca84dda9cea94354" + integrity sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-remap-async-to-generator" "^7.18.9" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + 
dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.21.0.tgz#e737b91037e5186ee16b76e7ae093358a5634f02" + integrity sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-classes@^7.18.9", "@babel/plugin-transform-classes@^7.19.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.21.0.tgz#f469d0b07a4c5a7dbb21afad9e27e57b47031665" + integrity sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz#704cc2fd155d1c996551db8276d55b9d46e4d0aa" + integrity sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/template" "^7.20.7" + +"@babel/plugin-transform-destructuring@^7.18.13", "@babel/plugin-transform-destructuring@^7.18.9": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.20.7.tgz#8bda578f71620c7de7c93af590154ba331415454" + integrity sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + 
+"@babel/plugin-transform-for-of@^7.18.8": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.0.tgz#964108c9988de1a60b4be2354a7d7e245f36e86e" + integrity sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.20.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.20.11.tgz#3daccca8e4cc309f03c3a0c4b41dc4b26f55214a" + integrity sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g== + dependencies: + "@babel/helper-module-transforms" "^7.20.11" + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-modules-commonjs@^7.18.6": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.21.2.tgz#6ff5070e71e3192ef2b7e39820a06fb78e3058e7" + integrity sha512-Cln+Yy04Gxua7iPdj6nOV96smLGjpElir5YwzF0LBPKoPlLDNJePNlrGGaybAJkd0zKRnOVXOgizSqPYMNYkzA== + dependencies: + "@babel/helper-module-transforms" "^7.21.2" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-simple-access" "^7.20.2" + +"@babel/plugin-transform-modules-systemjs@^7.18.9", "@babel/plugin-transform-modules-systemjs@^7.19.0": + version "7.20.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.20.11.tgz#467ec6bba6b6a50634eea61c9c232654d8a4696e" + integrity sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.20.11" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-identifier" "^7.19.1" + +"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" + integrity 
sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.18.6", "@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.20.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.20.5.tgz#626298dd62ea51d452c3be58b285d23195ba69a8" + integrity sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.20.5" + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.18.8", "@babel/plugin-transform-parameters@^7.20.7": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.20.7.tgz#0ee349e9d1bc96e78e3b37a7af423a4078a7083f" + integrity sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.20.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.20.5.tgz#57cda588c7ffb7f4f8483cc83bdcea02a907f04d" + integrity sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + regenerator-transform "^0.15.1" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.10.tgz#37d14d1fa810a368fd635d4d1476c0154144a96f" + integrity 
sha512-q5mMeYAdfEbpBAgzl7tBre/la3LeCxmDO1+wMXRdPWbcoMjR3GiXlCLk7JBZVVye0bqTGNMbt0yYVXX1B1jEWQ== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.9" + babel-plugin-polyfill-corejs2 "^0.3.2" + babel-plugin-polyfill-corejs3 "^0.5.3" + babel-plugin-polyfill-regenerator "^0.4.0" + semver "^6.3.0" + +"@babel/plugin-transform-runtime@7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" + integrity sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.18.9", "@babel/plugin-transform-spread@^7.19.0": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz#c2d83e0b99d3bf83e07b11995ee24bf7ca09401e" + integrity sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-transform-typescript@^7.20.2": version "7.20.13" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.20.13.tgz#e3581b356b8694f6ff450211fe6774eaff8d25ab" @@ -243,6 +997,224 @@ "@babel/helper-plugin-utils" "^7.20.2" "@babel/plugin-syntax-typescript" "^7.20.0" +"@babel/plugin-transform-typescript@^7.21.0": + version "7.21.0" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.21.0.tgz#f0956a153679e3b377ae5b7f0143427151e4c848" + integrity sha512-xo///XTPp3mDzTtrqXoBlK9eiAYW3wv9JXglcn/u1bi60RW11dEUxIgA8cbnDhutS1zacjMRmAwxE0gMklLnZg== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-typescript" "^7.20.0" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.18.10.tgz#83b8dfe70d7eea1aae5a10635ab0a5fe60dfc0f4" + integrity sha512-wVxs1yjFdW3Z/XkNfXKoblxoHgbtUF7/l3PvvP4m02Qz9TZ6uZGxRVYjSQeR87oQmHco9zWitW5J82DJ7sCjvA== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.18.10" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" 
"^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.18.9" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.9" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.18.9" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.18.6" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.18.9" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.18.10" + babel-plugin-polyfill-corejs2 "^0.3.2" + babel-plugin-polyfill-corejs3 "^0.5.3" + babel-plugin-polyfill-regenerator "^0.4.0" + core-js-compat "^3.22.1" + semver "^6.3.0" + +"@babel/preset-env@7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" + integrity sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.19.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" 
"^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.19.0" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.13" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.19.0" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.19.3" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-typescript@^7.17.12", "@babel/preset-typescript@^7.18.6": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.21.0.tgz#bcbbca513e8213691fe5d4b23d9251e01f00ebff" + integrity 
sha512-myc9mpoVA5m1rF8K8DgLEatOYFDpwC+RkMkjZ0Du6uI62YvDe8uxIEYVs/VCdSJ097nlALiU/yBC7//3nI+hNg== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-option" "^7.21.0" + "@babel/plugin-transform-typescript" "^7.21.0" + +"@babel/regjsgen@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== + +"@babel/runtime@^7.11.2", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.21.0.tgz#5b55c9d394e5fcf304909a8b00c07dc217b56673" + integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw== + dependencies: + regenerator-runtime "^0.13.11" + "@babel/runtime@^7.19.0", "@babel/runtime@^7.19.4": version "7.20.13" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.13.tgz#7055ab8a7cff2b8f6058bf6ae45ff84ad2aded4b" @@ -264,6 +1236,38 @@ "@babel/parser" "^7.20.7" "@babel/types" "^7.20.7" +"@babel/traverse@7.18.11": + version "7.18.11" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.11.tgz#3d51f2afbd83ecf9912bcbb5c4d94e3d2ddaa16f" + integrity sha512-TG9PiM2R/cWCAy6BPJKeHzNbu4lPzOSZpeMfeNErskGpTJx6trEvFaVCbDvpcxwy49BKWmEPwiW8mrysNiDvIQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.18.11" + "@babel/types" "^7.18.10" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/traverse@7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" + integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.3" + "@babel/types" "^7.19.3" + debug "^4.1.0" + globals "^11.1.0" + "@babel/traverse@^7.0.0", "@babel/traverse@^7.20.10", "@babel/traverse@^7.20.12", "@babel/traverse@^7.20.13", "@babel/traverse@^7.20.7": version "7.20.13" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.13.tgz#817c1ba13d11accca89478bd5481b2d168d07473" @@ -280,6 +1284,40 @@ debug "^4.1.0" globals "^11.1.0" +"@babel/traverse@^7.18.10", "@babel/traverse@^7.19.3", "@babel/traverse@^7.20.5", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.2.tgz#ac7e1f27658750892e815e60ae90f382a46d8e75" + integrity sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.21.1" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.21.2" + "@babel/types" "^7.21.2" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@7.18.10": + version 
"7.18.10" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.10.tgz#4908e81b6b339ca7c6b7a555a5fc29446f26dde6" + integrity sha512-MJvnbEiiNkpjo+LknnmRrqbY1GPUUggjv+wQVjetM/AONoupqRALB7I6jGqNUAZsKcRIEu2J6FRFvsczljjsaQ== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@babel/types@7.19.3": + version "7.19.3" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" + integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + "@babel/types@^7.0.0", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.20.5", "@babel/types@^7.20.7": version "7.20.7" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.7.tgz#54ec75e252318423fc07fb644dc6a58a64c09b7f" @@ -289,6 +1327,15 @@ "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" +"@babel/types@^7.18.10", "@babel/types@^7.18.9", "@babel/types@^7.19.3", "@babel/types@^7.21.0", "@babel/types@^7.21.2", "@babel/types@^7.4.4": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.2.tgz#92246f6e00f91755893c2876ad653db70c8310d1" + integrity sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw== + dependencies: + "@babel/helper-string-parser" "^7.19.4" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + "@casl/ability@^6.3.3": version "6.3.3" resolved "https://registry.yarnpkg.com/@casl/ability/-/ability-6.3.3.tgz#219e958f191cd2163482abb6a5196593d319fc2a" @@ -455,6 +1502,39 @@ resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.29.5.tgz#3fed1b3528ae8c5f1eb5d29b68755bebfd3294ee" integrity sha512-m7h+RXDUxOzEOGt4P+3OVPX7PuakZT3GBmaM/Y2u+abN3xZkziykD/NvedYFvvCCdQo714XcGl33bwifS9FZPQ== +"@cosmwasm/ts-codegen@0.21.1": + version "0.21.1" + resolved "https://registry.yarnpkg.com/@cosmwasm/ts-codegen/-/ts-codegen-0.21.1.tgz#abbb15fdb8f1c966079de49e0da0f847fa5045fe" + integrity sha512-6Rp1zKJLL08H0wMpXuEcvTWx29mR/pNlBS/2S6jTW8h+NzVlDfXQcLm42gpnpb7CzgW8rt1GMNZ3mCCdTDNuSA== + dependencies: + "@babel/core" "7.18.10" + "@babel/generator" "7.18.12" + "@babel/parser" "7.18.11" + "@babel/plugin-proposal-class-properties" "7.18.6" + "@babel/plugin-proposal-export-default-from" "7.18.10" + "@babel/plugin-proposal-object-rest-spread" "7.18.9" + "@babel/plugin-transform-runtime" "7.18.10" + "@babel/preset-env" "7.18.10" + "@babel/preset-typescript" "^7.18.6" + "@babel/runtime" "^7.18.9" + "@babel/traverse" "7.18.11" + "@babel/types" "7.18.10" + "@pyramation/json-schema-to-typescript" " 11.0.4" + case "1.6.3" + dargs "7.0.0" + deepmerge "4.2.2" + dotty "0.1.2" + fuzzy "0.1.3" + glob "8.0.3" + inquirerer "0.1.3" + long "^5.2.0" + minimist "1.2.6" + mkdirp "1.0.4" + parse-package-name "1.0.0" + rimraf "3.0.2" + shelljs "0.8.5" + wasm-ast-types "^0.15.0" + "@esbuild/android-arm64@0.16.17": version "0.16.17" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.16.17.tgz#cf91e86df127aa3d141744edafcba0abdc577d23" @@ -718,6 +1798,62 @@ resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b" integrity 
sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ== +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/transform@28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.3.tgz#59d8098e50ab07950e0f2fc0fc7ec462371281b0" + integrity sha512-u5dT5di+oFI6hfcLOHGTAfmUxFRrjK+vnaP0kkVow9Md/M7V/MxqQMOz/VV25UZO8pzeA9PjfTpOu6BDuwSPQA== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^28.1.3" + "@jridgewell/trace-mapping" "^0.3.13" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + "@jridgewell/gen-mapping@^0.1.0": version "0.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" @@ -750,7 +1886,7 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== -"@jridgewell/trace-mapping@^0.3.9": +"@jridgewell/trace-mapping@^0.3.13", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": version "0.3.17" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== @@ -758,6 +1894,11 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jsdevtools/ono@^7.1.3": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" + integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== + "@lerna/add@6.4.1": version "6.4.1" resolved 
"https://registry.yarnpkg.com/@lerna/add/-/add-6.4.1.tgz#fa20fe9ff875dc5758141262c8cde0d9a6481ec4" @@ -1777,13 +2918,19 @@ dependencies: "@octokit/openapi-types" "^16.0.0" -"@osmonauts/lcd@^0.10.0": - version "0.10.0" - resolved "https://registry.yarnpkg.com/@osmonauts/lcd/-/lcd-0.10.0.tgz#c982942dc13d7f6ab2a7873b289c7907e2169b14" - integrity sha512-PzmXk9x9MHyLn2fUztpAqWqvDmMiEJaQv/JcAoAOE8VdHrD9Hf/KWnE1RZtamuS2ngQRqvQPD0xotCGXW7eTxA== +"@osmonauts/ast@^0.76.2": + version "0.76.2" + resolved "https://registry.yarnpkg.com/@osmonauts/ast/-/ast-0.76.2.tgz#904968e1c47647f32d556396493b0129075897b9" + integrity sha512-tsbb9ydihCN8gAwhJyMkBeieqmodCcWTiDqmwW/QVVS4TiNiQmdArBh//L8G8z+8ORx72WcymbPYOD3c8gD/Tg== dependencies: + "@babel/parser" "^7.19.3" "@babel/runtime" "^7.19.0" - axios "0.27.2" + "@babel/types" "7.19.3" + "@osmonauts/proto-parser" "^0.38.0" + "@osmonauts/types" "^0.30.0" + "@osmonauts/utils" "^0.10.0" + case "1.6.3" + dotty "0.1.2" "@osmonauts/lcd@^0.8.0": version "0.8.0" @@ -1793,6 +2940,73 @@ "@babel/runtime" "^7.19.0" axios "0.27.2" +"@osmonauts/proto-parser@^0.38.0": + version "0.38.0" + resolved "https://registry.yarnpkg.com/@osmonauts/proto-parser/-/proto-parser-0.38.0.tgz#bc6ccd69b089188c9bffb00e63336d7c6df3a8f4" + integrity sha512-iISvFjboaHdttKHwn9kQA2rwT41LnhQEcObojT+9mHZ9+OyisP7GPcAkQ8gcSnPtQ9o/ul65kbleuO0SDyuO2Q== + dependencies: + "@babel/runtime" "^7.19.0" + "@osmonauts/types" "^0.30.0" + "@pyramation/protobufjs" "6.11.5" + dotty "0.1.2" + glob "8.0.3" + minimatch "5.1.0" + mkdirp "1.0.4" + +"@osmonauts/telescope@^0.88.2": + version "0.88.2" + resolved "https://registry.yarnpkg.com/@osmonauts/telescope/-/telescope-0.88.2.tgz#f4df43386f5bbd328e1537fa255f17a98a5e7ee5" + integrity sha512-9SBldjUWDwtrGgTzSIA8VM/TySTIt8Jc5VR5LEv9yfwKcbYOj57QOVccq5oty19djZPn7g+m76RpBedHxSFazQ== + dependencies: + "@babel/core" "7.19.3" + "@babel/generator" "7.19.3" + "@babel/parser" "^7.19.3" + "@babel/plugin-proposal-class-properties" "7.18.6" + "@babel/plugin-proposal-export-default-from" "7.18.10" + "@babel/plugin-proposal-object-rest-spread" "7.18.9" + "@babel/plugin-transform-runtime" "7.19.1" + "@babel/preset-env" "7.19.3" + "@babel/preset-typescript" "^7.17.12" + "@babel/runtime" "^7.19.0" + "@babel/traverse" "7.19.3" + "@babel/types" "7.19.3" + "@cosmwasm/ts-codegen" "0.21.1" + "@osmonauts/ast" "^0.76.2" + "@osmonauts/proto-parser" "^0.38.0" + "@osmonauts/types" "^0.30.0" + "@osmonauts/utils" "^0.10.0" + "@types/parse-package-name" "0.1.0" + case "1.6.3" + dargs "7.0.0" + deepmerge "4.2.2" + dotty "0.1.2" + fuzzy "0.1.3" + glob "8.0.3" + inquirerer "0.1.3" + long "^5.2.0" + minimatch "5.1.0" + minimist "1.2.6" + mkdirp "1.0.4" + parse-package-name "1.0.0" + rimraf "3.0.2" + shelljs "0.8.5" + +"@osmonauts/types@^0.30.0": + version "0.30.0" + resolved "https://registry.yarnpkg.com/@osmonauts/types/-/types-0.30.0.tgz#c1dcc0d3e285bfe853fe17a51f60720ea547a153" + integrity sha512-j/kE+yb8RHG/yiCza+Ssv92F9jLjU52FpPZ9FqJEVr+l7A49He2GQ64B3lScyc6dz6hV4J88m+X0ixvge8VJqQ== + dependencies: + "@babel/runtime" "^7.19.0" + "@osmonauts/utils" "^0.10.0" + case "1.6.3" + +"@osmonauts/utils@^0.10.0": + version "0.10.0" + resolved "https://registry.yarnpkg.com/@osmonauts/utils/-/utils-0.10.0.tgz#ad7011327375bb1028daeca7ccab172bec084050" + integrity sha512-rPPTZ6CE6BCgac3yhXlJcP5315Ddf1pnVCmYxsZi1Y5Vcyzzd6u2bfjPzaa0JOKiDtKwEGXXlVLAT29OM4DMhg== + dependencies: + "@babel/runtime" "^7.19.0" + "@parcel/watcher@2.0.4": version "2.0.4" resolved 
"https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.0.4.tgz#f300fef4cc38008ff4b8c29d92588eced3ce014b" @@ -1869,6 +3083,126 @@ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== +"@protobufs/amino@^0.0.11": + version "0.0.11" + resolved "https://registry.yarnpkg.com/@protobufs/amino/-/amino-0.0.11.tgz#5eb6b8193da8ea818484818dff2800a981cb7a61" + integrity sha512-JRIkW6/YGIUfbdDuASt3wsuxzC0Xj3U2sV0Arqa9iNwCvv4HtOpbqdWVVVgvQBnR0/ZkwQeXnt+GH7yT/DvsYQ== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/confio@^0.0.6": + version "0.0.6" + resolved "https://registry.yarnpkg.com/@protobufs/confio/-/confio-0.0.6.tgz#a6ddf44eca2cbe535384228312ae7ef5dff29644" + integrity sha512-abZ0ntTJBuB8q2aMBvOerAFk8CSzafB09YdttKFEqwxokZsLFZ3+o7YaH3RIk863oeM//8sonwTaxRV8r4rmSA== + +"@protobufs/cosmos@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@protobufs/cosmos/-/cosmos-0.1.0.tgz#492251de16be3e0a89820f48637cd3f42114f24c" + integrity sha512-L3NZ+z0kI6GMTiD2HASNe3WbopPhQlaQaKZNRue+8LiGEv/vbbxD1lox8cwOqes3AN5dHiT0i3+gvzIbKBb7gw== + dependencies: + "@protobufs/amino" "^0.0.11" + "@protobufs/cosmos_proto" "^0.0.10" + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + "@protobufs/tendermint" "^0.0.10" + +"@protobufs/cosmos_proto@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/cosmos_proto/-/cosmos_proto-0.0.10.tgz#622726ee227f220f608df180f938e5d8ebb1534a" + integrity sha512-4nMopXxN23udy1HEe+vS49zD9dxrA7i0E3n15QUz1x0tbrowYLHzJKeyCUNlsh5PKpEIXGxHXpPZWXs7vVCwUw== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/cosmwasm@^0.1.1": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@protobufs/cosmwasm/-/cosmwasm-0.1.1.tgz#313466d02ae7c010f64e6dd6edc06cdc1e676084" + integrity sha512-7rRkQVo7zWjTX+WKdtgM72KEnioGumA7ivYG+3ZGs/uuf8rwN8PmdWFYhLbMkSbPbyJf1bFSyjpP/+kxpH71ew== + dependencies: + "@protobufs/cosmos" "^0.1.0" + "@protobufs/cosmos_proto" "^0.0.10" + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + +"@protobufs/gogoproto@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/gogoproto/-/gogoproto-0.0.10.tgz#0181e17142c800b60c7ca5f92c76a614d86c5c54" + integrity sha512-u3eK1aSO3KOuX4RVFpqKPTaT/WLV50GFLuIC3slVGfD7Z1CfZ5ivHbFYUib96gihu1Mq2OZpNVj3dNws9YsVoQ== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/google@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/google/-/google-0.0.10.tgz#820f741b0c53f688550c74c7ddb25a5ee131a6bf" + integrity sha512-3yo+liabFM1519smwwfzh1C535CntXVsS7zT98xmo21tZUX7vxeFpQDMx38EzMGYSy/Reo8wEMWJUHqZzYsCUw== + +"@protobufs/ibc@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@protobufs/ibc/-/ibc-0.1.0.tgz#36aeadc9f09d185d683f66a650dad9dc40437875" + integrity sha512-GmGkX81yyd55Tm34SCOmcOiB0QRwFBHGmZpDRAsks33TBx4efAtT9rKAdtn/oPujx9sha1TqU2s3trnMPVvKyg== + dependencies: + "@protobufs/amino" "^0.0.11" + "@protobufs/confio" "^0.0.6" + "@protobufs/cosmos" "^0.1.0" + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + "@protobufs/tendermint" "^0.0.10" + +"@protobufs/tendermint@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/tendermint/-/tendermint-0.0.10.tgz#816b27410afcecd8b6d403df149f3c2b9b80655e" + integrity 
sha512-hAAMLFhKdAovslKeWnLTp2gGn5bxSTDVcQLKs4C4cC91R/KfHOh+Klt4PqSGUv/APINAmREzsX2LDUbIQ2dCpg== + dependencies: + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + +"@pyramation/json-schema-ref-parser@9.0.6": + version "9.0.6" + resolved "https://registry.yarnpkg.com/@pyramation/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#556e416ce7dcc15a3c1afd04d6a059e03ed09aeb" + integrity sha512-L5kToHAEc1Q87R8ZwWFaNa4tPHr8Hnm+U+DRdUVq3tUtk+EX4pCqSd34Z6EMxNi/bjTzt1syAG9J2Oo1YFlqSg== + dependencies: + "@jsdevtools/ono" "^7.1.3" + call-me-maybe "^1.0.1" + js-yaml "^3.13.1" + +"@pyramation/json-schema-to-typescript@ 11.0.4": + version "11.0.4" + resolved "https://registry.yarnpkg.com/@pyramation/json-schema-to-typescript/-/json-schema-to-typescript-11.0.4.tgz#959bdb631dad336e1fdbf608a9b5908ab0da1d6b" + integrity sha512-+aSzXDLhMHOEdV2cJ7Tjg/9YenjHU5BCmClVygzwxJZ1R16NOfEn7lTAwVzb/2jivOSnhjHzMJbnSf8b6rd1zg== + dependencies: + "@pyramation/json-schema-ref-parser" "9.0.6" + "@types/json-schema" "^7.0.11" + "@types/lodash" "^4.14.182" + "@types/prettier" "^2.6.1" + cli-color "^2.0.2" + get-stdin "^8.0.0" + glob "^7.1.6" + glob-promise "^4.2.2" + is-glob "^4.0.3" + lodash "^4.17.21" + minimist "^1.2.6" + mkdirp "^1.0.4" + mz "^2.7.0" + prettier "^2.6.2" + +"@pyramation/protobufjs@6.11.5": + version "6.11.5" + resolved "https://registry.yarnpkg.com/@pyramation/protobufjs/-/protobufjs-6.11.5.tgz#c64904a7214f2d061de53eed166c882a369731c4" + integrity sha512-gr+Iv6d7Iwq3PmNsTeQtL6TUONJs0WqbHFikett4zLquRK7egWuifZSKsqV8+o1UBNZcv52Z1HhgwTqNJe75Ag== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/long" "^4.0.1" + "@types/node" ">=13.7.0" + long "^4.0.0" + "@rollup/pluginutils@^4.2.0": version "4.2.1" resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-4.2.1.tgz#e6c6c3aba0744edce3fb2074922d3776c0af2a6d" @@ -1891,6 +3225,11 @@ resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== +"@sinclair/typebox@^0.24.1": + version "0.24.51" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== + "@tootallnate/once@2": version "2.0.0" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" @@ -1913,16 +3252,65 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== -"@types/json-schema@^7.0.9": +"@types/glob@^7.1.3": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" + integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/graceful-fs@^4.1.3": + version "4.1.6" + resolved 
"https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" + integrity sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/json-schema@^7.0.11", "@types/json-schema@^7.0.9": version "7.0.11" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== +"@types/lodash@^4.14.182": + version "4.14.191" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.191.tgz#09511e7f7cba275acd8b419ddac8da9a6a79e2fa" + integrity sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ== + "@types/long@^4.0.1": version "4.0.2" resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.2.tgz#b74129719fc8d11c01868010082d483b7545591a" integrity sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA== +"@types/marked@^4.0.8": + version "4.0.8" + resolved "https://registry.yarnpkg.com/@types/marked/-/marked-4.0.8.tgz#b316887ab3499d0a8f4c70b7bd8508f92d477955" + integrity sha512-HVNzMT5QlWCOdeuBsgXP8EZzKUf0+AXzN+sLmjvaB3ZlLqO+e4u0uXrdw9ub69wBKFs+c6/pA4r9sy6cCDvImw== + +"@types/minimatch@*": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== + "@types/minimatch@^3.0.3": version "3.0.5" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" @@ -1938,6 +3326,11 @@ resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== +"@types/node@*": + version "18.14.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.14.2.tgz#c076ed1d7b6095078ad3cf21dfeea951842778b1" + integrity sha512-1uEQxww3DaghA0RxqHx0O0ppVlo43pJhepY51OxuQIKHpjbnYLA7vcdwioNPzIqmC2u3I/dmylcqjlh0e7AyUA== + "@types/node@>=13.7.0": version "18.13.0" resolved "https://registry.yarnpkg.com/@types/node/-/node-18.13.0.tgz#0400d1e6ce87e9d3032c19eb6c58205b0d3f7850" @@ -1953,11 +3346,26 @@ resolved 
"https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== +"@types/numeral@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@types/numeral/-/numeral-2.0.2.tgz#8ea2c4f4e64c0cc948ad7da375f6f827778a7912" + integrity sha512-A8F30k2gYJ/6e07spSCPpkuZu79LCnkPTvqmIWQzNGcrzwFKpVOydG41lNt5wZXjSI149qjyzC2L1+F2PD/NUA== + "@types/parse-json@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/parse-package-name@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@types/parse-package-name/-/parse-package-name-0.1.0.tgz#a4e54e3eef677d8b9d931b54b94ed77e8ae52a4f" + integrity sha512-+vF4M3Cd3Ec22Uwb+OKhDrSAcXQ5I6evRx+1letx4KzfzycU+AOEDHnCifus8In11i8iYNFXPfzg9HWTcC1h+Q== + +"@types/prettier@^2.6.1": + version "2.7.2" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.2.tgz#6c2324641cc4ba050a8c710b2b251b377581fbf0" + integrity sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg== + "@types/semver@^7.3.12": version "7.3.13" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.13.tgz#da4bfd73f49bd541d28920ab0e2bf0ee80f71c91" @@ -1968,6 +3376,18 @@ resolved "https://registry.yarnpkg.com/@types/web-bluetooth/-/web-bluetooth-0.0.16.tgz#1d12873a8e49567371f2a75fe3e7f7edca6662d8" integrity sha512-oh8q2Zc32S6gd/j50GowEjKLoOVOwHP/bWVjKJInBwQqdOYMdPrf1oVlelTlyfFK3CKxL1uahMDAr+vy8T7yMQ== +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.22" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.22.tgz#7dd37697691b5f17d020f3c63e7a45971ff71e9a" + integrity sha512-pet5WJ9U8yPVRhkwuEIp5ktAeAqRZOq4UdAyWLWzxbtpyXnzbtLdKiXAjJzi/KLmPGS9wk86lUFWZFN6sISo4g== + dependencies: + "@types/yargs-parser" "*" + "@typescript-eslint/eslint-plugin@^5.0.0": version "5.50.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.50.0.tgz#fb48c31cadc853ffc1dc35373f56b5e2a8908fe9" @@ -2415,6 +3835,16 @@ ansi-colors@^4.1.1: resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== +ansi-escapes@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-2.0.0.tgz#5bae52be424878dd9783e8910e3fc2922e83c81b" + integrity sha512-tH/fSoQp4DrEodDK3QpdiWiZTSe7sBJ9eOqcQBZ0o9HTM+5M/viSEn+sPMoTuPjQQ8n++w3QJoPEjt8LVPcrCg== + +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + ansi-escapes@^4.2.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ 
-2422,11 +3852,31 @@ ansi-escapes@^4.2.1: dependencies: type-fest "^0.21.3" +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA== + +ansi-regex@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== + +ansi-regex@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.1.tgz#164daac87ab2d6f6db3a29875e2d1766582dabed" + integrity sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g== + ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA== + ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -2441,7 +3891,12 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" -anymatch@~3.1.2: +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@^3.0.3, anymatch@~3.1.2: version "3.1.3" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== @@ -2449,6 +3904,18 @@ anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" +apexcharts@^3.37.1: + version "3.37.1" + resolved "https://registry.yarnpkg.com/apexcharts/-/apexcharts-3.37.1.tgz#50443d302fc7fc72aace9c6c4074baae017c6950" + integrity sha512-fmQ5Updeb/LASl+S1+mIxXUFxzY0Fa7gexfCs4o+OPP9f2NEBNjvybOtPrah44N4roK7U5o5Jis906QeEQu0cA== + dependencies: + svg.draggable.js "^2.2.2" + svg.easing.js "^2.0.0" + svg.filter.js "^2.0.2" + svg.pathmorphing.js "^0.1.3" + svg.resize.js "^1.4.3" + svg.select.js "^3.0.1" + "aproba@^1.0.3 || ^2.0.0", aproba@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" @@ -2504,6 +3971,13 @@ asap@^2.0.0: resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== +ast-stringify@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/ast-stringify/-/ast-stringify-0.1.0.tgz#5c6439fbfb4513dcc26c7d34464ccd084ed91cb7" + integrity sha512-J1PgFYV3RG6r37+M6ySZJH406hR82okwGvFM9hLXpOvdx4WC4GEW8/qiw6pi1hKTrqcRvoHP8a7mp87egYr6iA== + dependencies: + "@babel/runtime" "^7.11.2" + ast-walker-scope@^0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/ast-walker-scope/-/ast-walker-scope-0.3.1.tgz#f137801cfaec341da64584efc6e2045c5324c01d" @@ -2556,6 +4030,49 @@ axios@^1.0.0, axios@^1.3.2: form-data "^4.0.0" proxy-from-env "^1.1.0" +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-polyfill-corejs2@^0.3.2, babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.3.tgz#d7e09c9a899079d71a8b670c6181af56ec19c5c7" + integrity sha512-zKsXDh0XjnrUEW0mxIHLfjBfnXSMr5Q/goMe/fxpQnLm07mcOZiIZHBNWCMx60HmdvjxfXcalac0tfFg0wqxyw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.2" + core-js-compat "^3.21.0" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.0, babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -2644,7 +4161,7 @@ brorand@^1.1.0: resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== -browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.21.3, browserslist@^4.21.4: +browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: version "4.21.5" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.5.tgz#75c5dae60063ee641f977e00edd3cfb2fb7af6a7" integrity sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w== @@ -2654,6 +4171,13 @@ browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.21.3, browserslist@^4 node-releases "^2.0.8" update-browserslist-db "^1.0.10" +bser@2.1.1: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" @@ -2730,6 +4254,11 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-me-maybe@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz#03f964f19522ba643b1b0693acb9152fe2074baa" + integrity sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ== + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -2769,7 +4298,23 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001449: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001450.tgz#022225b91200589196b814b51b1bbe45144cf74f" integrity sha512-qMBmvmQmFXaSxexkjjfMvD5rnDL0+m+dUMZKoDYsGG8iZN29RuYh9eRoMvKsT6uMAWlyUUGDEQGJJYjzCIO9ew== -chalk@^2.0.0, chalk@^2.4.1: +case@1.6.3: + version "1.6.3" + resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9" + integrity sha512-mzDSXIPaFwVDvZAHqZ9VlbyF4yyXRuX6IvB06WvPYkqJVO24kX1PPhv9bfpKNFZyxYFmmgo03HUiD8iklmJYRQ== + +chalk@^1.0.0, chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A== + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -2786,6 +4331,11 @@ chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1: ansi-styles "^4.1.0" supports-color "^7.1.0" +chardet@^0.4.0: + version "0.4.2" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" + integrity sha512-j/Toj7f1z98Hh2cYo2BVr85EpIRWqUi7rtRSGxh/cqUjqrnJe9l9UE7IUGd2vQ2p+kSHLkSzObQPZPLUC6TQwg== + chardet@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" @@ -2816,11 +4366,27 @@ ci-info@^2.0.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== +ci-info@^3.2.0: + version "3.8.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" + integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== + clean-stack@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== +cli-color@^2.0.2: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/cli-color/-/cli-color-2.0.3.tgz#73769ba969080629670f3f2ef69a4bf4e7cc1879" + integrity sha512-OkoZnxyC4ERN3zLzZaY9Emb7f/MhBOIpePv0Ycok0fJYT+Ouo00UBEIwsVsr0yoow++n5YWlSUgST9GKhNHiRQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.61" + es6-iterator "^2.0.3" + memoizee "^0.4.15" + timers-ext "^0.1.7" + cli-cursor@3.1.0, cli-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" @@ -2828,6 +4394,13 @@ cli-cursor@3.1.0, cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw== + dependencies: + restore-cursor "^2.0.0" + cli-spinners@2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" @@ -2838,6 +4411,11 @@ cli-spinners@^2.5.0: resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.7.0.tgz#f815fd30b5f9eaac02db604c7a231ed7cb2f797a" integrity sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw== +cli-width@^2.0.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" + integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== + cli-width@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" @@ -2921,6 +4499,11 @@ colorette@^2.0.19: resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== +colors@^1.1.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== + columnify@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.6.0.tgz#6989531713c9008bb29735e61e37acf5bd553cf3" @@ -3074,11 +4657,18 @@ conventional-recommended-bump@^6.1.0: meow "^8.0.0" q "^1.5.1" -convert-source-map@^1.7.0: +convert-source-map@^1.4.0, convert-source-map@^1.7.0: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +core-js-compat@^3.21.0, core-js-compat@^3.22.1, core-js-compat@^3.25.1: + version "3.29.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.29.0.tgz#1b8d9eb4191ab112022e7f6364b99b65ea52f528" + integrity sha512-ScMn3uZNAFhK2DGoEfErguoiAHhV2Ju+oJo/jK08p7B3f3UhocUrCCkTvnZaiS+edl5nlIoiBXKcwMc6elv4KQ== + dependencies: + browserslist "^4.21.5" + core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" @@ -3230,7 +4820,15 @@ csstype@^2.6.8: resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.21.tgz#2efb85b7cc55c80017c66a5ad7cbd931fda3a90e" integrity 
sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w== -dargs@^7.0.0: +d@1, d@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + +dargs@7.0.0, dargs@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc" integrity sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg== @@ -3303,6 +4901,11 @@ deep-is@^0.1.3: resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== +deepmerge@4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + defaults@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" @@ -3439,6 +5042,11 @@ dotenv@~10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotty@0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/dotty/-/dotty-0.1.2.tgz#512d44cc4111a724931226259297f235e8484f6f" + integrity sha512-V0EWmKeH3DEhMwAZ+8ZB2Ao4OK6p++Z0hsDtZq3N0+0ZMVqkzrcEGROvOnZpLnvBg5PTNG23JEDLAm64gPaotQ== + duplexer@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -3610,6 +5218,42 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" +es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@^0.10.61, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== + dependencies: + es6-iterator "^2.0.3" + es6-symbol "^3.1.3" + next-tick "^1.1.0" + +es6-iterator@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-symbol@^3.1.1, es6-symbol@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + +es6-weak-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" + integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== + dependencies: + d "1" + es5-ext "^0.10.46" + es6-iterator "^2.0.3" + es6-symbol "^3.1.1" + 
esbuild@^0.16.14: version "0.16.17" resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.16.17.tgz#fc2c3914c57ee750635fee71b89f615f25065259" @@ -3643,7 +5287,7 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== -escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== @@ -3844,6 +5488,14 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== +event-emitter@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== + dependencies: + d "1" + es5-ext "~0.10.14" + eventemitter3@^4.0.4: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -3864,6 +5516,22 @@ execa@^5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" +ext@^1.1.2: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== + dependencies: + type "^2.7.2" + +external-editor@^2.0.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" + integrity sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A== + dependencies: + chardet "^0.4.0" + iconv-lite "^0.4.17" + tmp "^0.0.33" + external-editor@^3.0.3: version "3.1.0" resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" @@ -3930,6 +5598,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fb-watchman@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + figures@3.2.0, figures@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -3937,6 +5612,13 @@ figures@3.2.0, figures@^3.0.0: dependencies: escape-string-regexp "^1.0.5" +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA== + dependencies: + escape-string-regexp "^1.0.5" + file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -4065,7 +5747,7 @@ fs.realpath@^1.0.0: resolved 
"https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -fsevents@~2.3.2: +fsevents@^2.3.2, fsevents@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== @@ -4090,6 +5772,11 @@ functions-have-names@^1.2.2: resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== +fuzzy@0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/fuzzy/-/fuzzy-0.1.3.tgz#4c76ec2ff0ac1a36a9dccf9a00df8623078d4ed8" + integrity sha512-/gZffu4ykarLrCiP3Ygsa86UAo1E5vEVlvTrpkKywXSbP9Xhln3oSp9QSV57gEq3JFFpGJ4GZ+5zdEp3FcUh4w== + gauge@^4.0.3: version "4.0.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce" @@ -4123,6 +5810,11 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: has "^1.0.3" has-symbols "^1.0.3" +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + get-pkg-repo@^4.0.0: version "4.2.1" resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz#75973e1c8050c73f48190c52047c4cee3acbf385" @@ -4138,6 +5830,11 @@ get-port@^5.1.1: resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== +get-stdin@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" + integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== + get-stream@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" @@ -4227,6 +5924,13 @@ glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" +glob-promise@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/glob-promise/-/glob-promise-4.2.2.tgz#15f44bcba0e14219cd93af36da6bb905ff007877" + integrity sha512-xcUzJ8NWN5bktoTIX7eOclO1Npxd/dyVqUJxlLIDasT4C7KZyqlPIwkdJ0Ypiy3p2ZKahTjK4M9uC3sNSfNMzw== + dependencies: + "@types/glob" "^7.1.3" + glob@7.1.4: version "7.1.4" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" @@ -4239,7 +5943,18 @@ glob@7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.1.3, glob@^7.1.4: +glob@8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.0.3.tgz#415c6eb2deed9e502c68fa44a272e6da6eeca42e" + integrity sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + +glob@^7.0.0, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" resolved 
"https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -4311,7 +6026,7 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.6: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.10" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== @@ -4338,6 +6053,13 @@ hard-rejection@^2.1.0: resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg== + dependencies: + ansi-regex "^2.0.0" + has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -4486,7 +6208,7 @@ humanize-ms@^1.2.1: dependencies: ms "^2.0.0" -iconv-lite@^0.4.24: +iconv-lite@^0.4.17, iconv-lite@^0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== @@ -4566,6 +6288,11 @@ inherits@2, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + ini@^1.3.2, ini@^1.3.4: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" @@ -4584,6 +6311,58 @@ init-package-json@^3.0.2: validate-npm-package-license "^3.0.4" validate-npm-package-name "^4.0.0" +inquirer-autocomplete-prompt@^0.11.1: + version "0.11.1" + resolved "https://registry.yarnpkg.com/inquirer-autocomplete-prompt/-/inquirer-autocomplete-prompt-0.11.1.tgz#f90ca9510a4c489882e9be294934bd8c2e575e09" + integrity sha512-VM4eNiyRD4CeUc2cyKni+F8qgHwL9WC4LdOr+mEC85qP/QNsDV+ysVqUrJYhw1TmDQu1QVhc8hbaL7wfk8SJxw== + dependencies: + ansi-escapes "^2.0.0" + chalk "^1.1.3" + figures "^2.0.0" + inquirer "3.1.1" + lodash "^4.17.4" + run-async "^2.3.0" + util "^0.10.3" + +inquirer@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.1.1.tgz#87621c4fba4072f48a8dd71c9f9df6f100b2d534" + integrity sha512-H50sHQwgvvaTBd3HpKMVtL/u6LoHDvYym51gd7bGQe/+9HkCE+J0/3N5FJLfd6O6oz44hHewC2Pc2LodzWVafQ== + dependencies: + ansi-escapes "^2.0.0" + chalk "^1.0.0" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + 
external-editor "^2.0.4" + figures "^2.0.0" + lodash "^4.3.0" + mute-stream "0.0.7" + run-async "^2.2.0" + rx-lite "^4.0.8" + rx-lite-aggregates "^4.0.8" + string-width "^2.0.0" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^6.0.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.2.tgz#ad50942375d036d327ff528c08bd5fab089928ca" + integrity sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ== + dependencies: + ansi-escapes "^3.2.0" + chalk "^2.4.2" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^3.0.3" + figures "^2.0.0" + lodash "^4.17.12" + mute-stream "0.0.7" + run-async "^2.2.0" + rxjs "^6.4.0" + string-width "^2.1.0" + strip-ansi "^5.1.0" + through "^2.3.6" + inquirer@^8.2.4: version "8.2.5" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.5.tgz#d8654a7542c35a9b9e069d27e2df4858784d54f8" @@ -4605,6 +6384,15 @@ inquirer@^8.2.4: through "^2.3.6" wrap-ansi "^7.0.0" +inquirerer@0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/inquirerer/-/inquirerer-0.1.3.tgz#ecf91dc672b3bf45211d7f64bf5e8d5e171fd2ad" + integrity sha512-yGgLUOqPxTsINBjZNZeLi3cv2zgxXtw9feaAOSJf2j6AqIT5Uxs5ZOqOrfAf+xP65Sicla1FD3iDxa3D6TsCAQ== + dependencies: + colors "^1.1.2" + inquirer "^6.0.0" + inquirer-autocomplete-prompt "^0.11.1" + internal-slot@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.4.tgz#8551e7baf74a7a6ba5f749cfb16aa60722f0d6f3" @@ -4614,6 +6402,11 @@ internal-slot@^1.0.4: has "^1.0.3" side-channel "^1.0.4" +interpret@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== + ip@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da" @@ -4699,6 +6492,11 @@ is-extglob@^2.1.1: resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== + is-fullwidth-code-point@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" @@ -4775,6 +6573,11 @@ is-plain-object@^5.0.0: resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== +is-promise@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + is-regex@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" @@ -4901,6 +6704,22 @@ isomorphic-ws@^4.0.1: resolved 
"https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== +istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4: + version "5.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + jake@^10.8.5: version "10.8.5" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" @@ -4911,6 +6730,51 @@ jake@^10.8.5: filelist "^1.0.1" minimatch "^3.0.4" +jest-haste-map@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.3.tgz#abd5451129a38d9841049644f34b034308944e2b" + integrity sha512-3S+RQWDXccXDKSWnkHa/dPwt+2qwA8CJzR61w3FoYCvoo3Pn8tvGcysmMF0Bj0EX5RYvAI2EIvC57OmotfdtKA== + dependencies: + "@jest/types" "^28.1.3" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + jest-worker "^28.1.3" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-regex-util@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-util@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-worker@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + jiti@^1.16.0, jiti@^1.16.2: version "1.16.2" resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.16.2.tgz#75f7a0a8fde4a0e57e576f7d329491d588db89cf" @@ -4933,7 +6797,7 @@ js-yaml@4.1.0, js-yaml@^4.1.0: dependencies: argparse "^2.0.1" -js-yaml@^3.10.0: +js-yaml@^3.10.0, js-yaml@^3.13.1: version "3.14.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ -4946,6 +6810,11 @@ jsesc@^2.5.1: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity 
sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + json-parse-better-errors@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" @@ -5170,6 +7039,11 @@ lodash._reinterpolate@^3.0.0: resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" integrity sha512-xYHt68QRoYGjeeM/XOE1uJtvXQAgvszfBhjV4yvsQH0u2i9I6cI6c6/eG4Hh3UAOVn0y/xAXwmTzEay49Q//HA== +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + lodash.ismatch@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz#756cb5150ca3ba6f11085a78849645f188f85f37" @@ -5205,7 +7079,7 @@ lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21: +lodash@^4.17.12, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.3.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -5247,6 +7121,13 @@ lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1: resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA== +lru-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== + dependencies: + es5-ext "~0.10.2" + magic-string@^0.25.7: version "0.25.9" resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" @@ -5305,6 +7186,13 @@ make-fetch-happen@^10.0.3, make-fetch-happen@^10.0.6: socks-proxy-agent "^7.0.0" ssri "^9.0.0" +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + map-obj@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" @@ -5315,11 +7203,30 @@ map-obj@^4.0.0: resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== +md-editor-v3@^2.8.1: + version "2.8.1" + resolved 
"https://registry.yarnpkg.com/md-editor-v3/-/md-editor-v3-2.8.1.tgz#190ed35d7c9aaa13b8255e0211b83da3ad1e6c23" + integrity sha512-xAPcwIHmL+jR5SNztBXSYN2/9L92dOTVs81QjzJAz9Og1o8HDEQnWcFbwG4aqNLCINPgUBgX370ajF6zc4bHEA== + mdn-data@2.0.14: version "2.0.14" resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== +memoizee@^0.4.15: + version "0.4.15" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" + integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.53" + es6-weak-map "^2.0.3" + event-emitter "^0.3.5" + is-promise "^2.2.2" + lru-queue "^0.1.0" + next-tick "^1.1.0" + timers-ext "^0.1.7" + memory-fs@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" @@ -5380,6 +7287,11 @@ mime-types@^2.1.12: dependencies: mime-db "1.52.0" +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -5407,6 +7319,13 @@ minimatch@3.0.5: dependencies: brace-expansion "^1.1.7" +minimatch@5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" @@ -5437,6 +7356,11 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" +minimist@1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.7" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" @@ -5518,7 +7442,7 @@ mkdirp-infer-owner@^2.0.0: infer-owner "^1.0.4" mkdirp "^1.0.3" -mkdirp@^1.0.3, mkdirp@^1.0.4: +mkdirp@1.0.4, mkdirp@^1.0.3, mkdirp@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== @@ -5569,11 +7493,25 @@ multimatch@^5.0.0: arrify "^2.0.1" minimatch "^3.0.4" +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + integrity sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ== + mute-stream@0.0.8, mute-stream@~0.0.4: version "0.0.8" resolved 
"https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + nanoid@^3.3.4: version "3.3.4" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" @@ -5599,6 +7537,11 @@ neo-async@^2.6.0: resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== +next-tick@1, next-tick@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== + nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -5649,6 +7592,11 @@ node-gyp@^9.0.0: tar "^6.1.2" which "^2.0.2" +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + node-releases@^2.0.8: version "2.0.10" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.10.tgz#c311ebae3b6a148c89b1813fd7c4d3c024ef537f" @@ -5830,6 +7778,11 @@ nth-check@^2.0.1: dependencies: boolbase "^1.0.0" +numeral@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/numeral/-/numeral-2.0.6.tgz#4ad080936d443c2561aed9f2197efffe25f4e506" + integrity sha512-qaKRmtYPZ5qdw4jWJD6bxEf1FJEqllJrwxCLIm0sQU/A7v2/czigzOb+C2uSiFsa9lBUzeH7M1oK+Q+OLxL3kA== + nx@15.6.3, "nx@>=15.4.2 < 16": version "15.6.3" resolved "https://registry.yarnpkg.com/nx/-/nx-15.6.3.tgz#900087bce38c6e5975660c23ebd41ead1bf54f98" @@ -5871,6 +7824,11 @@ nx@15.6.3, "nx@>=15.4.2 < 16": yargs "^17.6.2" yargs-parser "21.1.1" +object-assign@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + object-inspect@^1.12.2, object-inspect@^1.9.0: version "1.12.3" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" @@ -5906,6 +7864,13 @@ once@^1.3.0, once@^1.4.0: dependencies: wrappy "1" +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ== + dependencies: + mimic-fn "^1.0.0" + onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -6135,6 +8100,11 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" +parse-package-name@1.0.0: + version 
"1.0.0" + resolved "https://registry.yarnpkg.com/parse-package-name/-/parse-package-name-1.0.0.tgz#1a108757e4ffc6889d5e78bcc4932a97c097a5a7" + integrity sha512-kBeTUtcj+SkyfaW4+KBe0HtsloBJ/mKTPoxpVdA57GZiPerREsUWJOhVj9anXweFiJkm5y8FG1sxFZkZ0SN6wg== + parse-path@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-7.0.0.tgz#605a2d58d0a749c8594405d8cc3a2bf76d16099b" @@ -6211,7 +8181,7 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.3.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -6249,6 +8219,11 @@ pinia@^2.0.28: "@vue/devtools-api" "^6.4.5" vue-demi "*" +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + pkg-dir@^4.1.0, pkg-dir@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" @@ -6526,6 +8501,11 @@ prettier-linter-helpers@^1.0.0: dependencies: fast-diff "^1.1.2" +prettier@^2.6.2: + version "2.8.4" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.4.tgz#34dd2595629bfbb79d344ac4a91ff948694463c3" + integrity sha512-vIS4Rlc2FNh0BySk3Wkd6xmwxB0FpOndW5fisM5H8hsZSxU2VWVB5CWIkIjWvrHjIhxk2g3bfMKM87zNTrZddw== + prettier@^2.7.1: version "2.8.3" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.3.tgz#ab697b1d3dd46fb4626fbe2f543afe0cc98d8632" @@ -6756,6 +8736,13 @@ readonly-date@^1.0.0: resolved "https://registry.yarnpkg.com/readonly-date/-/readonly-date-1.0.0.tgz#5af785464d8c7d7c40b9d738cbde8c646f97dcd9" integrity sha512-tMKIV7hlk0h4mO3JTmmVuIlJVXjKk3Sep9Bf5OH0O+758ruuVkUy2J9SttDLm91IEX/WHlXPSpxMGjPj4beMIQ== +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== + dependencies: + resolve "^1.1.6" + redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -6764,11 +8751,30 @@ redent@^3.0.0: indent-string "^4.0.0" strip-indent "^3.0.0" +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + regenerator-runtime@^0.13.11: version "0.13.11" resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== +regenerator-transform@^0.15.1: + version "0.15.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.1.tgz#f6c4e99fc1b4591f780db2586328e4d9a9d8dc56" + integrity sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg== + dependencies: + "@babel/runtime" "^7.8.4" + regexp.prototype.flags@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" @@ -6783,6 +8789,25 @@ regexpp@^3.2.0: resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== +regexpu-core@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.1.tgz#66900860f88def39a5cb79ebd9490e84f17bcdfb" + integrity sha512-nCOzW2V/X15XpLsK2rlgdwrysrBq+AauCn+omItIz4R1pIcmeot5zvjdmOBRLzEH/CkC6IxMJVmxDe3QcMuNVQ== + dependencies: + "@babel/regjsgen" "^0.8.0" + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" + +regjsparser@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" @@ -6805,7 +8830,7 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve@^1.1.7, resolve@^1.10.0, resolve@^1.22.1: +resolve@^1.1.6, resolve@^1.1.7, resolve@^1.10.0, resolve@^1.14.2, resolve@^1.22.1: version "1.22.1" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== @@ -6814,6 +8839,14 @@ resolve@^1.1.7, resolve@^1.10.0, resolve@^1.22.1: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q== + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -6832,7 +8865,7 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rimraf@^3.0.0, rimraf@^3.0.2: +rimraf@3.0.2, 
rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== @@ -6846,7 +8879,7 @@ rollup@^3.10.0: optionalDependencies: fsevents "~2.3.2" -run-async@^2.4.0: +run-async@^2.2.0, run-async@^2.3.0, run-async@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== @@ -6858,6 +8891,25 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" +rx-lite-aggregates@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" + integrity sha512-3xPNZGW93oCjiO7PtKxRK6iOVYBWBvtf9QHDfU23Oc+dLIQmAV//UnyXV/yihv81VS/UqoQPk4NegS8EFi55Hg== + dependencies: + rx-lite "*" + +rx-lite@*, rx-lite@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" + integrity sha512-Cun9QucwK6MIrp3mry/Y7hqD1oFqTYLQ4pGxaHTjIdaFDWRGGLikqp6u8LcWJnzpoALg9hap+JGk8sFIUuEGNA== + +rxjs@^6.4.0: + version "6.6.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" + integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== + dependencies: + tslib "^1.9.0" + rxjs@^7.5.5: version "7.8.0" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.0.tgz#90a938862a82888ff4c7359811a595e14e1e09a4" @@ -6915,7 +8967,7 @@ semver@7.3.4: dependencies: lru-cache "^6.0.0" -semver@^6.0.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== @@ -6968,6 +9020,15 @@ shell-quote@^1.6.1: resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.0.tgz#20d078d0eaf71d54f43bd2ba14a1b5b9bfa5c8ba" integrity sha512-QHsz8GgQIGKlRi24yFc6a6lN69Idnx634w49ay6+jA5yFh7a1UY+4Rp6HPx/L/1zcEDPEij8cIsiqR6bQsE5VQ== +shelljs@0.8.5: + version "0.8.5" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + side-channel@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" @@ -7121,6 +9182,14 @@ stop-iteration-iterator@^1.0.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string-width@^2.0.0, string-width@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + string.prototype.padend@^3.0.0: version "3.1.4" resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.4.tgz#2c43bb3a89eb54b6750de5942c123d6c98dd65b6" @@ -7162,6 +9231,27 @@ 
string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" +strip-ansi@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg== + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -7220,6 +9310,11 @@ stylehacks@^5.1.1: browserslist "^4.21.4" postcss-selector-parser "^6.0.4" +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g== + supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -7234,6 +9329,13 @@ supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" @@ -7244,6 +9346,61 @@ svg-tags@^1.0.0: resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" integrity sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA== +svg.draggable.js@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/svg.draggable.js/-/svg.draggable.js-2.2.2.tgz#c514a2f1405efb6f0263e7958f5b68fce50603ba" + integrity sha512-JzNHBc2fLQMzYCZ90KZHN2ohXL0BQJGQimK1kGk6AvSeibuKcIdDX9Kr0dT9+UJ5O8nYA0RB839Lhvk4CY4MZw== + dependencies: + svg.js "^2.0.1" + +svg.easing.js@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/svg.easing.js/-/svg.easing.js-2.0.0.tgz#8aa9946b0a8e27857a5c40a10eba4091e5691f12" + integrity sha512-//ctPdJMGy22YoYGV+3HEfHbm6/69LJUTAqI2/5qBvaNHZ9uUFVC82B0Pl299HzgH13rKrBgi4+XyXXyVWWthA== + dependencies: + svg.js ">=2.3.x" + +svg.filter.js@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/svg.filter.js/-/svg.filter.js-2.0.2.tgz#91008e151389dd9230779fcbe6e2c9a362d1c203" + integrity sha512-xkGBwU+dKBzqg5PtilaTb0EYPqPfJ9Q6saVldX+5vCRy31P6TlRCP3U9NxH3HEufkKkpNgdTLBJnmhDHeTqAkw== + dependencies: + svg.js "^2.2.5" + +svg.js@>=2.3.x, svg.js@^2.0.1, svg.js@^2.2.5, svg.js@^2.4.0, svg.js@^2.6.5: + version 
"2.7.1" + resolved "https://registry.yarnpkg.com/svg.js/-/svg.js-2.7.1.tgz#eb977ed4737001eab859949b4a398ee1bb79948d" + integrity sha512-ycbxpizEQktk3FYvn/8BH+6/EuWXg7ZpQREJvgacqn46gIddG24tNNe4Son6omdXCnSOaApnpZw6MPCBA1dODA== + +svg.pathmorphing.js@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/svg.pathmorphing.js/-/svg.pathmorphing.js-0.1.3.tgz#c25718a1cc7c36e852ecabc380e758ac09bb2b65" + integrity sha512-49HWI9X4XQR/JG1qXkSDV8xViuTLIWm/B/7YuQELV5KMOPtXjiwH4XPJvr/ghEDibmLQ9Oc22dpWpG0vUDDNww== + dependencies: + svg.js "^2.4.0" + +svg.resize.js@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/svg.resize.js/-/svg.resize.js-1.4.3.tgz#885abd248e0cd205b36b973c4b578b9a36f23332" + integrity sha512-9k5sXJuPKp+mVzXNvxz7U0uC9oVMQrrf7cFsETznzUDDm0x8+77dtZkWdMfRlmbkEEYvUn9btKuZ3n41oNA+uw== + dependencies: + svg.js "^2.6.5" + svg.select.js "^2.1.2" + +svg.select.js@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-2.1.2.tgz#e41ce13b1acff43a7441f9f8be87a2319c87be73" + integrity sha512-tH6ABEyJsAOVAhwcCjF8mw4crjXSI1aa7j2VQR8ZuJ37H2MBUbyeqYr5nEO7sSN3cy9AR9DUwNg0t/962HlDbQ== + dependencies: + svg.js "^2.2.5" + +svg.select.js@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-3.0.1.tgz#a4198e359f3825739226415f82176a90ea5cc917" + integrity sha512-h5IS/hKkuVCbKSieR9uQCj9w+zLHoPh+ce19bBYyqF53g6mnPB8sAtIbe1s9dh2S2fCmYX2xel1Ln3PJBbK4kw== + dependencies: + svg.js "^2.6.5" + svgo@^2.7.0: version "2.8.0" resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" @@ -7295,6 +9452,15 @@ temp-dir@^1.0.0: resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" integrity sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ== +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + text-extensions@^1.0.0: version "1.9.0" resolved "https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26" @@ -7305,6 +9471,20 @@ text-table@^0.2.0: resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + through2@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" @@ -7325,6 +9505,14 @@ through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6: resolved 
"https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== +timers-ext@^0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" + integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== + dependencies: + es5-ext "~0.10.46" + next-tick "1" + tmp@^0.0.33: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" @@ -7339,6 +9527,11 @@ tmp@~0.2.1: dependencies: rimraf "^3.0.0" +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" @@ -7375,7 +9568,7 @@ tsconfig-paths@^4.1.2: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^1.8.1: +tslib@^1.8.1, tslib@^1.9.0: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== @@ -7429,6 +9622,16 @@ type-fest@^0.8.1: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + typed-array-length@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" @@ -7485,6 +9688,29 @@ unctx@^2.1.1: magic-string "^0.26.7" unplugin "^1.0.0" +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" + integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + unimport@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/unimport/-/unimport-2.1.0.tgz#d1c5bfe29f8ef1625816e2f6f3250fa2c1ec28c1" @@ -7610,6 +9836,13 @@ util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== +util@^0.10.3: + version "0.10.4" + resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901" + integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A== + dependencies: + inherits "2.0.3" + uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" @@ -7738,6 +9971,11 @@ vue-tsc@^1.0.12: "@volar/vue-language-core" "1.0.24" "@volar/vue-typescript" "1.0.24" +vue3-apexcharts@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/vue3-apexcharts/-/vue3-apexcharts-1.4.1.tgz#ea561308430a1c5213b7f17c44ba3c845f6c490d" + integrity sha512-96qP8JDqB9vwU7bkG5nVU+E0UGQn7yYQVqUUCLQMYWDuQyu2vE77H/UFZ1yI+hwzlSTBKT9BqnNG8JsFegB3eg== + vue3-perfect-scrollbar@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/vue3-perfect-scrollbar/-/vue3-perfect-scrollbar-1.6.1.tgz#296e0e0c61a8f6278184f5b09bb45d137af92327" @@ -7768,6 +10006,25 @@ walk-up-path@^1.0.0: resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-1.0.0.tgz#d4745e893dd5fd0dbb58dd0a4c6a33d9c9fec53e" integrity sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg== +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +wasm-ast-types@^0.15.0: + version "0.15.0" + resolved "https://registry.yarnpkg.com/wasm-ast-types/-/wasm-ast-types-0.15.0.tgz#101f98fc9c5d0528bc615f80d9d7a897168e0593" + integrity sha512-A3wgW3mlqK3irUjHqMkA26ADFA1z55LgQKl+KXRf1ylN5DValI3t/R9Sv3grSa7vpCAeG6E+XWCd7pGRNDsylw== + dependencies: + "@babel/runtime" "^7.18.9" + "@babel/types" "7.18.10" + "@jest/transform" "28.1.3" + ast-stringify "0.1.0" + case "1.6.3" + deepmerge "4.2.2" + wcwidth@^1.0.0, wcwidth@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8"