Compare commits

..

1 Commits

Author SHA1 Message Date
ishavenikar
822c72094d Add support for zenith account (#4)
Part of https://www.notion.so/Implement-stacks-1b5a6b22d472806a82f5dafed6955138

Co-authored-by: IshaVenikar <ishavenikar7@gmail.com>
Reviewed-on: #4
Co-authored-by: ishavenikar <ishavenikar@noreply.git.vdb.to>
Co-committed-by: ishavenikar <ishavenikar@noreply.git.vdb.to>
2025-06-04 09:43:21 +00:00
43 changed files with 9155 additions and 77 deletions

View File

@ -42,7 +42,7 @@ Copy the `.env.sample` file and rename it to `.env.local`
### 2. Run a local cosmos-sdk Simapp instance
It's recommended that you make your simapp instance mimic the denomination of cosmoshub-4 (`uatom`). Put the local address of your node as the value for `NEXT_PUBLIC_NODE_ADDRESSES` in your `.env.local` file.
It's recommended that you make your simapp instance mimic the denomination of cosmoshub-4 (`uatom`). Put the local address of your node as the value for `NEXT_PUBLIC_NODE_ADDRESS` in your `.env.local` file.
A more in depth tutorial on this is coming soon :)
@ -67,3 +67,21 @@ With the simapp process running, run these commands in another window:
npm install
npm run dev
```
### Protobuf
Prerequisite: [protoc](https://protobuf.dev/installation/)
Run following scripts when [proto files](./proto/) are updated.
- Install dependencies:
```bash
yarn
```
- Generate typescript bindings for the proto files:
```bash
./scripts/protocgen.sh
```

View File

@ -2,7 +2,7 @@ import { Table, TableBody, TableCell, TableRow } from "@/components/ui/table";
import { printableCoin, thinSpace } from "@/lib/displayHelpers";
import { toastError } from "@/lib/utils";
import { Coin } from "@cosmjs/amino";
import { StargateClient } from "@cosmjs/stargate";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
import { useEffect, useState } from "react";
import { useChains } from "../../context/ChainsContext";
import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar";
@ -22,7 +22,10 @@ export default function BalancesTable({ walletAddress }: BalancesTableProps) {
}
try {
const client = await StargateClient.connect(chain.nodeAddress);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const newBalances = await client.getAllBalances(walletAddress);
setBalances(newBalances);
} catch (e) {

View File

@ -8,7 +8,7 @@ import { msgTypeCountsFromJson } from "@/lib/txMsgHelpers";
import { cn, toastError } from "@/lib/utils";
import { WalletInfo } from "@/types/signing";
import { toBase64 } from "@cosmjs/encoding";
import { StargateClient } from "@cosmjs/stargate";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
import { Loader2, MoveRightIcon } from "lucide-react";
import Image from "next/image";
import Link from "next/link";
@ -40,7 +40,9 @@ export default function ListMultisigTxs({
const getSignature = useCallback(
async (address: string) => {
const client = await StargateClient.connect(chain.nodeAddress);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const accountOnChain = await client.getAccount(address);
if (!accountOnChain) {

View File

@ -6,7 +6,7 @@ import { toastError } from "@/lib/utils";
import { WalletInfo } from "@/types/signing";
import { MultisigThresholdPubkey } from "@cosmjs/amino";
import { toBase64 } from "@cosmjs/encoding";
import { StargateClient } from "@cosmjs/stargate";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
import { Loader2, MoveRightIcon } from "lucide-react";
import Image from "next/image";
import Link from "next/link";
@ -27,7 +27,10 @@ export default function ListUserMultisigs() {
const getSignature = useCallback(
async (address: string) => {
const client = await StargateClient.connect(chain.nodeAddress);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const accountOnChain = await client.getAccount(address);
if (!accountOnChain) {

View File

@ -1,8 +1,8 @@
import { ChainInfo } from "@/context/ChainsContext/types";
import { pubkeyToAddress } from "@cosmjs/amino";
import { StargateClient } from "@cosmjs/stargate";
import { z } from "zod";
import { checkAddressOrPubkey } from "../../../lib/displayHelpers";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
export const getCreateMultisigSchema = (chain: ChainInfo) =>
z
@ -26,11 +26,14 @@ export const getCreateMultisigSchema = (chain: ChainInfo) =>
const address = member.startsWith(chain.addressPrefix)
? member
: pubkeyToAddress(
{ type: "tendermint/PubKeySecp256k1", value: member },
chain.addressPrefix,
);
{ type: "tendermint/PubKeySecp256k1", value: member },
chain.addressPrefix,
);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const client = await StargateClient.connect(chain.nodeAddress);
const accountOnChain = await client.getAccount(address);
if (!accountOnChain || !accountOnChain.pubkey) {
@ -78,7 +81,7 @@ export const getCreateMultisigSchema = (chain: ChainInfo) =>
);
return address;
} catch {}
} catch { }
}
return member;
@ -122,9 +125,8 @@ export const getCreateMultisigSchema = (chain: ChainInfo) =>
.refine(
({ members, threshold }) => threshold <= members.filter(({ member }) => member !== "").length,
({ members }) => ({
message: `Threshold can't be higher than the number of members (${
members.filter(({ member }) => member !== "").length
})`,
message: `Threshold can't be higher than the number of members (${members.filter(({ member }) => member !== "").length
})`,
path: ["threshold"],
}),
);

View File

@ -11,7 +11,6 @@ import {
import { Input } from "@/components/ui/input";
import { getKeplrKey } from "@/lib/keplr";
import { toastError } from "@/lib/utils";
import { StargateClient } from "@cosmjs/stargate";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter } from "next/router";
import { useEffect } from "react";
@ -22,6 +21,7 @@ import { createMultisigFromCompressedSecp256k1Pubkeys } from "../../../lib/multi
import ConfirmCreateMultisig from "./ConfirmCreateMultisig";
import MemberFormField from "./MemberFormField";
import { getCreateMultisigSchema } from "./formSchema";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
export default function CreateMultisigForm() {
const router = useRouter();
@ -77,7 +77,10 @@ export default function CreateMultisigForm() {
return member;
}
const client = await StargateClient.connect(chain.nodeAddress);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const accountOnChain = await client.getAccount(member);
if (!accountOnChain || !accountOnChain.pubkey) {

View File

@ -10,8 +10,8 @@ import {
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { ChainInfo } from "@/context/ChainsContext/types";
import { StargateClient } from "@cosmjs/stargate";
import { zodResolver } from "@hookform/resolvers/zod";
import { accountFromAny, SigningZenithClient } from "@/utils/cosmos-client.ts";
import Link from "next/link";
import { NextRouter, withRouter } from "next/router";
import { useForm } from "react-hook-form";
@ -21,7 +21,10 @@ import { exampleAddress } from "../../lib/displayHelpers";
const existsMultisigAccount = async (chain: ChainInfo, address: string) => {
try {
const client = await StargateClient.connect(chain.nodeAddress);
const client = await SigningZenithClient.connect(chain.nodeAddress, {
accountParser: accountFromAny
});
const accountOnChain = await client.getAccount(address);
return accountOnChain !== null;
} catch {

View File

@ -3,7 +3,7 @@ import { toastError } from "@/lib/utils";
import { ReactNode, createContext, useContext, useEffect, useReducer } from "react";
import { emptyChain, isChainInfoFilled, setChain, setChains, setChainsError } from "./helpers";
import { getChain, getNodeFromArray, useChainsFromRegistry } from "./service";
import { addLocalChainInStorage, addRecentChainNameInStorage, getChainFromEnvfile, setChainInUrl } from "./storage";
import { addLocalChainInStorage, addRecentChainNameInStorage, setChainInUrl } from "./storage";
import { Action, ChainsContextType, Dispatch, State } from "./types";
const ChainsContext = createContext<ChainsContextType | undefined>(undefined);
@ -61,8 +61,6 @@ interface ChainsProviderProps {
readonly children: ReactNode;
}
const envfileChain = getChainFromEnvfile('');
export const ChainsProvider = ({ children }: ChainsProviderProps) => {
const [state, dispatch] = useReducer(chainsReducer, {
chain: emptyChain,
@ -72,10 +70,6 @@ export const ChainsProvider = ({ children }: ChainsProviderProps) => {
});
const { chainItems, chainItemsError } = useChainsFromRegistry();
if (isChainInfoFilled(envfileChain)) {
chainItems.localnets.set(envfileChain.registryName, envfileChain);
}
useEffect(() => {
setChains(dispatch, chainItems);

View File

@ -1,6 +1,5 @@
import { getChainsFromRegistry, getShaFromRegistry } from "@/lib/chainRegistry";
import { toastError } from "@/lib/utils";
import { StargateClient } from "@cosmjs/stargate";
import { useEffect, useState } from "react";
import { emptyChain, isChainInfoFilled } from "./helpers";
import {
@ -14,6 +13,7 @@ import {
setShaInStorage,
} from "./storage";
import { ChainItems } from "./types";
import { SigningZenithClient } from "@/utils/cosmos-client.ts";
export const useChainsFromRegistry = () => {
const [chainItems, setChainItems] = useState<ChainItems>({
@ -88,7 +88,7 @@ export const getNodeFromArray = async (nodeArray: readonly string[]) => {
for (const node of secureNodes) {
try {
// test client connection
const client = await StargateClient.connect(node);
const client = await SigningZenithClient.connect(node);
await client.getHeight();
return node;
} catch {}
@ -104,33 +104,19 @@ export const getChain = (chains: ChainItems) => {
// Avoid app from thinking the /api route is a registryName
const chainNameFromUrl = rootRoute === "api" ? "" : rootRoute;
// Get chain only after public chains have been fetched from registry
if (!(chains.mainnets.size || chains.testnets.size)) {
return emptyChain;
}
const recentChain = getRecentChainFromStorage(chains);
if (!chainNameFromUrl && isChainInfoFilled(recentChain)) {
return recentChain;
}
// Set chain if no recent chain and chain name set in URL
// Check if info set in URL
const urlChain = getChainFromUrl(chainNameFromUrl);
let storedChain = getChainFromStorage(chainNameFromUrl, chains);
let chain = { ...storedChain, ...urlChain };
if (isChainInfoFilled(chain)) {
return chain;
}
// Check if info set in env
const envfileChain = getChainFromEnvfile(chainNameFromUrl);
storedChain = getChainFromStorage(
envfileChain.registryName || "cosmoshub",
const storedChain = getChainFromStorage(
chainNameFromUrl || envfileChain.registryName || "cosmoshub",
chains,
);
chain = { ...storedChain, ...envfileChain };
const chain = { ...storedChain, ...envfileChain, ...urlChain };
return isChainInfoFilled(chain) ? chain : emptyChain;
};

View File

@ -165,7 +165,7 @@ export const getChainFromEnvfile = (chainName: string) => {
const explorerLinksValue: Partial<ExplorerLinks> = JSON.parse(explorerLinks || "{}");
const envfileChain: Partial<ChainInfo> = {
registryName,
registryName: chainName,
...(logo && { logo }),
...(chainId && { chainId }),
...(chainDisplayName && { chainDisplayName }),

View File

@ -16,7 +16,7 @@ const compat = new FlatCompat({
export default [
{
ignores: ["components/ui/"],
ignores: ["components/ui/", "src/proto"],
},
...compat.extends("next/core-web-vitals", "plugin:@typescript-eslint/recommended", "prettier"),
{

View File

@ -11,9 +11,6 @@ const mainnetsUrl = `https://api.github.com/repos/${chainRegistryRepo}/contents`
const testnetsUrl = `https://api.github.com/repos/${chainRegistryRepo}/contents/testnets`;
const registryCdnUrl = `https://cdn.jsdelivr.net/gh/${chainRegistryRepo}@${repoBranch}`;
const registryEnabledChains = process.env.NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS;
const registryEnabledChainsValue: readonly string[] = JSON.parse(registryEnabledChains || "[]");
const getShaFromRegistry = async () => {
const { sha }: { sha: string } = await requestGhJson(shaUrl);
return sha;
@ -39,11 +36,6 @@ const getChainsFromRegistry = async () => {
continue;
}
// Skip if chain is not included in NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS (unset env to fetch all chains)
if (registryEnabledChainsValue.length && !registryEnabledChainsValue.includes(path)) {
continue;
}
mainnetPromisesMap.set(path, {
chainInfo: requestGhJson(`${registryCdnUrl}/${path}/chain.json`),
assetList: requestGhJson(`${registryCdnUrl}/${path}/assetlist.json`),
@ -77,11 +69,6 @@ const getChainsFromRegistry = async () => {
continue;
}
// Skip if chain is not included in NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS (unset env to fetch all chains)
if (registryEnabledChainsValue.length && !registryEnabledChainsValue.includes(path)) {
continue;
}
testnetPromisesMap.set(path, {
chainInfo: requestGhJson(`${registryCdnUrl}/${path}/chain.json`),
assetList: requestGhJson(`${registryCdnUrl}/${path}/assetlist.json`),

View File

@ -6,6 +6,7 @@ import {
pubkeyToAddress,
} from "@cosmjs/amino";
import { Account, StargateClient } from "@cosmjs/stargate";
import { SigningZenithClient } from "@/utils/cosmos-client.ts";
import { createDbMultisig, getDbMultisig } from "./api";
import { checkAddress, explorerLinkAccount } from "./displayHelpers";
@ -93,7 +94,7 @@ export const getHostedMultisig = async (
hostedMultisig = await (async () => {
try {
const client = providedClient ?? (await StargateClient.connect(nodeAddress));
const client = providedClient ?? (await SigningZenithClient.connect(nodeAddress));
const accountOnChain = await client.getAccount(multisigAddress);
if (!accountOnChain) {

67
package-lock.json generated
View File

@ -1,10 +1,12 @@
{
"name": "cosmos-multisig-ui",
"version": "0.1.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "cosmos-multisig-ui",
"version": "0.1.1",
"dependencies": {
"@cosmjs/amino": "^0.33.0",
"@cosmjs/cosmwasm-stargate": "^0.33.0",
@ -86,6 +88,9 @@
"typescript": "5.5.2",
"vanilla-jsoneditor": "^0.23.7",
"zod": "^3.23.8"
},
"devDependencies": {
"ts-proto": "^2.7.1"
}
},
"node_modules/@adobe/css-tools": {
@ -610,6 +615,12 @@
"integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
"license": "MIT"
},
"node_modules/@bufbuild/protobuf": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.5.1.tgz",
"integrity": "sha512-lut4UTvKL8tqtend0UDu7R79/n9jA7Jtxf77RNPbxtmWqfWI4qQ9bTjf7KCS4vfqLmpQbuHr1ciqJumAgJODdw==",
"dev": true
},
"node_modules/@codemirror/autocomplete": {
"version": "6.18.4",
"resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.4.tgz",
@ -5611,6 +5622,18 @@
"cdl": "bin/cdl.js"
}
},
"node_modules/case-anything": {
"version": "2.1.13",
"resolved": "https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz",
"integrity": "sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==",
"dev": true,
"engines": {
"node": ">=12.13"
},
"funding": {
"url": "https://github.com/sponsors/mesqueeb"
}
},
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@ -6342,8 +6365,8 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==",
"devOptional": true,
"license": "Apache-2.0",
"optional": true,
"bin": {
"detect-libc": "bin/detect-libc.js"
},
@ -6429,6 +6452,15 @@
"node": ">=12"
}
},
"node_modules/dprint-node": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/dprint-node/-/dprint-node-1.0.8.tgz",
"integrity": "sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==",
"dev": true,
"dependencies": {
"detect-libc": "^1.0.3"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@ -13037,6 +13069,39 @@
"license": "MIT",
"peer": true
},
"node_modules/ts-poet": {
"version": "6.12.0",
"resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-6.12.0.tgz",
"integrity": "sha512-xo+iRNMWqyvXpFTaOAvLPA5QAWO6TZrSUs5s4Odaya3epqofBu/fMLHEWl8jPmjhA0s9sgj9sNvF1BmaQlmQkA==",
"dev": true,
"dependencies": {
"dprint-node": "^1.0.8"
}
},
"node_modules/ts-proto": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/ts-proto/-/ts-proto-2.7.1.tgz",
"integrity": "sha512-Ifih7nTnI+N6PtloK8rPmSfHQcTjFeKLIh84jSdWIwA3KCWKcx8pLlR3jZvTgO1Fe0r76nowQ98I4mRSjuQHYw==",
"dev": true,
"dependencies": {
"@bufbuild/protobuf": "^2.0.0",
"case-anything": "^2.1.13",
"ts-poet": "^6.12.0",
"ts-proto-descriptors": "2.0.0"
},
"bin": {
"protoc-gen-ts_proto": "protoc-gen-ts_proto"
}
},
"node_modules/ts-proto-descriptors": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-2.0.0.tgz",
"integrity": "sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==",
"dev": true,
"dependencies": {
"@bufbuild/protobuf": "^2.0.0"
}
},
"node_modules/tsconfig-paths": {
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",

View File

@ -1,7 +1,7 @@
{
"name": "cosmos-multisig-ui",
"private": true,
"version": "0.1.3",
"version": "0.1.2-zenith-0.1.0",
"scripts": {
"dev": "next dev",
"test": "jest --watch",
@ -93,5 +93,8 @@
"typescript": "5.5.2",
"vanilla-jsoneditor": "^0.23.7",
"zod": "^3.23.8"
},
"devDependencies": {
"ts-proto": "^2.7.1"
}
}

View File

@ -5,8 +5,9 @@ import { updateDbTxHash } from "@/lib/api";
import { toastError, toastSuccess } from "@/lib/utils";
import { MultisigThresholdPubkey } from "@cosmjs/amino";
import { fromBase64 } from "@cosmjs/encoding";
import { Account, StargateClient, makeMultisignedTxBytes } from "@cosmjs/stargate";
import { Account, makeMultisignedTxBytes } from "@cosmjs/stargate";
import { assert } from "@cosmjs/utils";
import { SigningZenithClient } from "@/utils/cosmos-client.ts";
import { GetServerSideProps } from "next";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
@ -125,7 +126,7 @@ const TransactionPage = ({
new Map(currentSignatures.map((s) => [s.address, fromBase64(s.signature)])),
);
const broadcaster = await StargateClient.connect(chain.nodeAddress);
const broadcaster = await SigningZenithClient.connect(chain.nodeAddress);
const result = await broadcaster.broadcastTx(signedTxBytes);
await updateDbTxHash(transactionID, result.transactionHash);
toastSuccess("Transaction broadcasted with hash", result.transactionHash);

View File

@ -2,9 +2,9 @@ import { getBelongedMultisigs, getCreatedMultisigs } from "@/graphql/multisig";
import { getNonce, incrementNonce } from "@/graphql/nonce";
import { GetDbMultisigTxsBody } from "@/lib/api";
import { verifyKeplrSignature } from "@/lib/keplr";
import { SigningZenithClient } from "@/utils/cosmos-client.ts";
import { decodeSignature, pubkeyToAddress } from "@cosmjs/amino";
import { toBase64 } from "@cosmjs/encoding";
import { StargateClient } from "@cosmjs/stargate";
import type { NextApiRequest, NextApiResponse } from "next";
const endpointErrMsg = "Failed to list multisigs";
@ -28,7 +28,7 @@ export default async function apiListMultisigs(req: NextApiRequest, res: NextApi
const address = pubkeyToAddress(body.signature.pub_key, body.chain.addressPrefix);
const client = await StargateClient.connect(body.chain.nodeAddress);
const client = await SigningZenithClient.connect(body.chain.nodeAddress);
const accountOnChain = await client.getAccount(address);
if (!accountOnChain) {

View File

@ -3,9 +3,9 @@ import { getNonce, incrementNonce } from "@/graphql/nonce";
import { getTransactions } from "@/graphql/transaction";
import { GetDbMultisigTxsBody } from "@/lib/api";
import { verifyKeplrSignature } from "@/lib/keplr";
import { SigningZenithClient } from "@/utils/cosmos-client.ts";
import { decodeSignature, pubkeyToAddress } from "@cosmjs/amino";
import { toBase64 } from "@cosmjs/encoding";
import { StargateClient } from "@cosmjs/stargate";
import type { NextApiRequest, NextApiResponse } from "next";
const endpointErrMsg = "Failed to list transactions";
@ -34,7 +34,7 @@ export default async function apiListTransactions(req: NextApiRequest, res: Next
const address = pubkeyToAddress(body.signature.pub_key, body.chain.addressPrefix);
const client = await StargateClient.connect(body.chain.nodeAddress);
const client = await SigningZenithClient.connect(body.chain.nodeAddress);
const accountOnChain = await client.getAccount(address);
if (!accountOnChain) {

75
proto/amino/amino.proto Normal file
View File

@ -0,0 +1,75 @@
syntax = "proto3";
package amino;
import "google/protobuf/descriptor.proto";
// TODO(fdymylja): once we fully migrate to protov2 the go_package needs to be updated.
// We need this right now because gogoproto codegen needs to import the extension.
option go_package = "github.com/cosmos/cosmos-sdk/types/tx/amino";
extend google.protobuf.MessageOptions {
// name is the string used when registering a concrete
// type into the Amino type registry, via the Amino codec's
// `RegisterConcrete()` method. This string MUST be at most 39
// characters long, or else the message will be rejected by the
// Ledger hardware device.
string name = 11110001;
// encoding describes the encoding format used by Amino for the given
// message. The field type is chosen to be a string for
// flexibility, but it should ideally be short and expected to be
// machine-readable, for example "base64" or "utf8_json". We
// highly recommend to use underscores for word separation instead of spaces.
//
// If left empty, then the Amino encoding is expected to be the same as the
// Protobuf one.
//
// This annotation should not be confused with the `encoding`
// one which operates on the field level.
string message_encoding = 11110002;
}
extend google.protobuf.FieldOptions {
// encoding describes the encoding format used by Amino for
// the given field. The field type is chosen to be a string for
// flexibility, but it should ideally be short and expected to be
// machine-readable, for example "base64" or "utf8_json". We
// highly recommend to use underscores for word separation instead of spaces.
//
// If left empty, then the Amino encoding is expected to be the same as the
// Protobuf one.
//
// This annotation should not be confused with the
// `message_encoding` one which operates on the message level.
string encoding = 11110003;
// field_name sets a different field name (i.e. key name) in
// the amino JSON object for the given field.
//
// Example:
//
// message Foo {
// string bar = 1 [(amino.field_name) = "baz"];
// }
//
// Then the Amino encoding of Foo will be:
// `{"baz":"some value"}`
string field_name = 11110004;
// dont_omitempty sets the field in the JSON object even if
// its value is empty, i.e. equal to the Golang zero value. To learn what
// the zero values are, see https://go.dev/ref/spec#The_zero_value.
//
// Fields default to `omitempty`, which is the default behavior when this
// annotation is unset. When set to true, then the field value in the
// JSON object will be set, i.e. not `undefined`.
//
// Example:
//
// message Foo {
// string bar = 1;
// string baz = 2 [(amino.dont_omitempty) = true];
// }
//
// f := Foo{};
// out := AminoJSONEncoder(&f);
// out == {"baz":""}
bool dont_omitempty = 11110005;
// oneof_name sets the type name for the given field oneof field. This is used
// by the Amino JSON encoder to encode the type of the oneof field, and must be the same string in
// the RegisterConcrete() method usage used to register the concrete type.
string oneof_name = 11110006;
}

View File

@ -0,0 +1,62 @@
syntax = "proto3";
package cosmos.auth.v1beta1;
import "amino/amino.proto";
import "cosmos_proto/cosmos.proto";
import "gogoproto/gogo.proto";
import "google/protobuf/any.proto";
option go_package = "github.com/cosmos/cosmos-sdk/x/auth/types";
// BaseAccount defines a base account type. It contains all the necessary fields
// for basic account functionality. Any custom account type should extend this
// type for additional functionality (e.g. vesting).
message BaseAccount {
option (amino.name) = "cosmos-sdk/BaseAccount";
option (gogoproto.goproto_getters) = false;
option (gogoproto.equal) = false;
option (cosmos_proto.implements_interface) = "cosmos.auth.v1beta1.AccountI";
string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"];
google.protobuf.Any pub_key = 2 [(gogoproto.jsontag) = "public_key,omitempty", (amino.field_name) = "public_key"];
uint64 account_number = 3;
uint64 sequence = 4;
}
// ModuleAccount defines an account for modules that holds coins on a pool.
message ModuleAccount {
option (amino.name) = "cosmos-sdk/ModuleAccount";
option (amino.message_encoding) = "module_account";
option (gogoproto.goproto_getters) = false;
option (cosmos_proto.implements_interface) = "cosmos.auth.v1beta1.ModuleAccountI";
BaseAccount base_account = 1 [(gogoproto.embed) = true];
string name = 2;
repeated string permissions = 3;
}
// ModuleCredential represents an unclaimable pubkey for base accounts controlled by modules.
message ModuleCredential {
option (amino.name) = "cosmos-sdk/GroupAccountCredential";
option (cosmos_proto.message_added_in) = "cosmos-sdk 0.47";
// module_name is the name of the module used for address derivation (passed into address.Module).
string module_name = 1;
// derivation_keys is for deriving a module account address (passed into address.Module)
// adding more keys creates sub-account addresses (passed into address.Derive)
repeated bytes derivation_keys = 2;
}
// Params defines the parameters for the auth module.
message Params {
option (amino.name) = "cosmos-sdk/x/auth/Params";
option (gogoproto.equal) = true;
uint64 max_memo_characters = 1;
uint64 tx_sig_limit = 2;
uint64 tx_size_cost_per_byte = 3;
uint64 sig_verify_cost_ed25519 = 4 [(gogoproto.customname) = "SigVerifyCostED25519"];
uint64 sig_verify_cost_secp256k1 = 5 [(gogoproto.customname) = "SigVerifyCostSecp256k1"];
}

View File

@ -0,0 +1,55 @@
syntax = "proto3";
package cosmos.base.query.v1beta1;
option go_package = "github.com/cosmos/cosmos-sdk/types/query";
import "cosmos_proto/cosmos.proto";
// PageRequest is to be embedded in gRPC request messages for efficient
// pagination. Ex:
//
// message SomeRequest {
// Foo some_parameter = 1;
// PageRequest pagination = 2;
// }
message PageRequest {
// key is a value returned in PageResponse.next_key to begin
// querying the next page most efficiently. Only one of offset or key
// should be set.
bytes key = 1;
// offset is a numeric offset that can be used when key is unavailable.
// It is less efficient than using key. Only one of offset or key should
// be set.
uint64 offset = 2;
// limit is the total number of results to be returned in the result page.
// If left empty it will default to a value to be set by each app.
uint64 limit = 3;
// count_total is set to true to indicate that the result set should include
// a count of the total number of items available for pagination in UIs.
// count_total is only respected when offset is used. It is ignored when key
// is set.
bool count_total = 4;
// reverse is set to true if results are to be returned in the descending order.
bool reverse = 5 [(cosmos_proto.field_added_in) = "cosmos-sdk 0.43"];
}
// PageResponse is to be embedded in gRPC response messages where the
// corresponding request message has used PageRequest.
//
// message SomeResponse {
// repeated Bar results = 1;
// PageResponse page = 2;
// }
message PageResponse {
// next_key is the key to be passed to PageRequest.key to
// query the next page most efficiently. It will be empty if
// there are no more results.
bytes next_key = 1;
// total is total number of results available if PageRequest.count_total
// was set, its value is undefined otherwise
uint64 total = 2;
}

View File

@ -0,0 +1,30 @@
syntax = "proto3";
package cosmos.msg.v1;
import "google/protobuf/descriptor.proto";
// TODO(fdymylja): once we fully migrate to protov2 the go_package needs to be updated.
// We need this right now because gogoproto codegen needs to import the extension.
option go_package = "github.com/cosmos/cosmos-sdk/types/msgservice";
extend google.protobuf.ServiceOptions {
// service indicates that the service is a Msg service and that requests
// must be transported via blockchain transactions rather than gRPC.
// Tooling can use this annotation to distinguish between Msg services and
// other types of services via reflection.
bool service = 11110000;
}
extend google.protobuf.MessageOptions {
// signer must be used in cosmos messages in order
// to signal to external clients which fields in a
// given cosmos message must be filled with signer
// information (address).
// The field must be the protobuf name of the message
// field extended with this MessageOption.
// The field must either be of string kind, or of message
// kind in case the signer information is contained within
// a message inside the cosmos message.
repeated string signer = 11110000;
}

View File

@ -0,0 +1,112 @@
syntax = "proto3";
package cosmos_proto;
import "google/protobuf/descriptor.proto";
option go_package = "github.com/cosmos/cosmos-proto;cosmos_proto";
extend google.protobuf.MethodOptions {
// method_added_in is used to indicate from which version the method was added.
string method_added_in = 93001;
}
extend google.protobuf.MessageOptions {
// implements_interface is used to indicate the type name of the interface
// that a message implements so that it can be used in google.protobuf.Any
// fields that accept that interface. A message can implement multiple
// interfaces. Interfaces should be declared using a declare_interface
// file option.
repeated string implements_interface = 93001;
// message_added_in is used to indicate from which version the message was added.
string message_added_in = 93002;
}
extend google.protobuf.FieldOptions {
// accepts_interface is used to annotate that a google.protobuf.Any
// field accepts messages that implement the specified interface.
// Interfaces should be declared using a declare_interface file option.
string accepts_interface = 93001;
// scalar is used to indicate that this field follows the formatting defined
// by the named scalar which should be declared with declare_scalar. Code
// generators may choose to use this information to map this field to a
// language-specific type representing the scalar.
string scalar = 93002;
// field_added_in is used to indicate from which version the field was added.
string field_added_in = 93003;
}
extend google.protobuf.FileOptions {
// declare_interface declares an interface type to be used with
// accepts_interface and implements_interface. Interface names are
// expected to follow the following convention such that their declaration
// can be discovered by tools: for a given interface type a.b.C, it is
// expected that the declaration will be found in a protobuf file named
// a/b/interfaces.proto in the file descriptor set.
repeated InterfaceDescriptor declare_interface = 793021;
// declare_scalar declares a scalar type to be used with
// the scalar field option. Scalar names are
// expected to follow the following convention such that their declaration
// can be discovered by tools: for a given scalar type a.b.C, it is
// expected that the declaration will be found in a protobuf file named
// a/b/scalars.proto in the file descriptor set.
repeated ScalarDescriptor declare_scalar = 793022;
// file_added_in is used to indicate from which version the file was added.
string file_added_in = 793023;
}
// InterfaceDescriptor describes an interface type to be used with
// accepts_interface and implements_interface and declared by declare_interface.
message InterfaceDescriptor {
  // name is the name of the interface. It should be a short-name (without
  // a period) such that the fully qualified name of the interface will be
  // package.name, ex. for the package a.b and interface named C, the
  // fully-qualified name will be a.b.C.
  string name = 1;

  // description is a human-readable description of the interface and its
  // purpose.
  string description = 2;
}
// ScalarDescriptor describes a scalar type to be used with
// the scalar field option and declared by declare_scalar.
// Scalars extend simple protobuf built-in types with additional
// syntax and semantics, for instance to represent big integers.
// Scalars should ideally define an encoding such that there is only one
// valid syntactical representation for a given semantic meaning,
// i.e. the encoding should be deterministic.
message ScalarDescriptor {
  // name is the name of the scalar. It should be a short-name (without
  // a period) such that the fully qualified name of the scalar will be
  // package.name, ex. for the package a.b and scalar named C, the
  // fully-qualified name will be a.b.C.
  string name = 1;

  // description is a human-readable description of the scalar and its
  // encoding format. For instance a big integer or decimal scalar should
  // specify precisely the expected encoding format.
  string description = 2;

  // field_type is the type of field with which this scalar can be used.
  // Scalars can be used with one and only one type of field so that
  // encoding standards are simple and clear. Currently only string and
  // bytes fields are supported for scalars.
  repeated ScalarType field_type = 3;
}
// ScalarType enumerates the protobuf field types with which a declared
// scalar may be used (see ScalarDescriptor.field_type).
enum ScalarType {
  SCALAR_TYPE_UNSPECIFIED = 0;
  SCALAR_TYPE_STRING = 1;
  SCALAR_TYPE_BYTES = 2;
}

145
proto/gogoproto/gogo.proto Normal file
View File

@ -0,0 +1,145 @@
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto2";
package gogoproto;
import "google/protobuf/descriptor.proto";
option java_package = "com.google.protobuf";
option java_outer_classname = "GoGoProtos";
option go_package = "github.com/gogo/protobuf/gogoproto";
// Per-enum code-generation options for gogo/protobuf.
extend google.protobuf.EnumOptions {
  optional bool goproto_enum_prefix = 62001;
  optional bool goproto_enum_stringer = 62021;
  optional bool enum_stringer = 62022;
  optional string enum_customname = 62023;
  optional bool enumdecl = 62024;
}
// Per-enum-value code-generation options for gogo/protobuf.
extend google.protobuf.EnumValueOptions {
  optional string enumvalue_customname = 66001;
}
// File-wide gogo/protobuf code-generation options. The *_all options set a
// default for every message/enum in the file; the per-message/per-enum
// options of the same name (without the _all suffix) override them.
extend google.protobuf.FileOptions {
  optional bool goproto_getters_all = 63001;
  optional bool goproto_enum_prefix_all = 63002;
  optional bool goproto_stringer_all = 63003;
  optional bool verbose_equal_all = 63004;
  optional bool face_all = 63005;
  optional bool gostring_all = 63006;
  optional bool populate_all = 63007;
  optional bool stringer_all = 63008;
  optional bool onlyone_all = 63009;

  optional bool equal_all = 63013;
  optional bool description_all = 63014;
  optional bool testgen_all = 63015;
  optional bool benchgen_all = 63016;
  optional bool marshaler_all = 63017;
  optional bool unmarshaler_all = 63018;
  optional bool stable_marshaler_all = 63019;

  optional bool sizer_all = 63020;

  optional bool goproto_enum_stringer_all = 63021;
  optional bool enum_stringer_all = 63022;

  optional bool unsafe_marshaler_all = 63023;
  optional bool unsafe_unmarshaler_all = 63024;

  optional bool goproto_extensions_map_all = 63025;
  optional bool goproto_unrecognized_all = 63026;
  optional bool gogoproto_import = 63027;
  optional bool protosizer_all = 63028;
  optional bool compare_all = 63029;
  optional bool typedecl_all = 63030;
  optional bool enumdecl_all = 63031;

  optional bool goproto_registration = 63032;
  optional bool messagename_all = 63033;

  optional bool goproto_sizecache_all = 63034;
  optional bool goproto_unkeyed_all = 63035;
}
// Per-message gogo/protobuf code-generation options; these override the
// corresponding file-level *_all defaults.
extend google.protobuf.MessageOptions {
  optional bool goproto_getters = 64001;
  optional bool goproto_stringer = 64003;
  optional bool verbose_equal = 64004;
  optional bool face = 64005;
  optional bool gostring = 64006;
  optional bool populate = 64007;
  // NOTE(review): tag 67008 (not 64008) matches upstream gogo.proto; the
  // irregular number is intentional upstream — do not "fix" it.
  optional bool stringer = 67008;
  optional bool onlyone = 64009;

  optional bool equal = 64013;
  optional bool description = 64014;
  optional bool testgen = 64015;
  optional bool benchgen = 64016;
  optional bool marshaler = 64017;
  optional bool unmarshaler = 64018;
  optional bool stable_marshaler = 64019;

  optional bool sizer = 64020;

  optional bool unsafe_marshaler = 64023;
  optional bool unsafe_unmarshaler = 64024;

  optional bool goproto_extensions_map = 64025;
  optional bool goproto_unrecognized = 64026;

  optional bool protosizer = 64028;
  optional bool compare = 64029;

  optional bool typedecl = 64030;

  optional bool messagename = 64033;

  optional bool goproto_sizecache = 64034;
  optional bool goproto_unkeyed = 64035;
}
// Per-field gogo/protobuf code-generation options (nullability, embedding,
// custom Go types/names, struct tags, and well-known-type handling).
extend google.protobuf.FieldOptions {
  optional bool nullable = 65001;
  optional bool embed = 65002;
  optional string customtype = 65003;
  optional string customname = 65004;
  optional string jsontag = 65005;
  optional string moretags = 65006;
  optional string casttype = 65007;
  optional string castkey = 65008;
  optional string castvalue = 65009;

  optional bool stdtime = 65010;
  optional bool stdduration = 65011;
  optional bool wktpointer = 65012;

  optional string castrepeated = 65013;
}

View File

@ -0,0 +1,31 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.api;
import "google/api/http.proto";
import "google/protobuf/descriptor.proto";
option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
option java_multiple_files = true;
option java_outer_classname = "AnnotationsProto";
option java_package = "com.google.api";
option objc_class_prefix = "GAPI";
// Custom method option that attaches an HTTP REST mapping (defined in
// google/api/http.proto) to a gRPC method.
extend google.protobuf.MethodOptions {
  // See `HttpRule`.
  HttpRule http = 72295728;
}

370
proto/google/api/http.proto Normal file
View File

@ -0,0 +1,370 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.api;
option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
option java_multiple_files = true;
option java_outer_classname = "HttpProto";
option java_package = "com.google.api";
option objc_class_prefix = "GAPI";
// Defines the HTTP configuration for an API service. It contains a list of
// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
// to one or more HTTP REST API methods.
//
// NOTE(review): vendored verbatim from googleapis; keep in sync with upstream
// google/api/http.proto when updating.
message Http {
  // A list of HTTP configuration rules that apply to individual API methods.
  //
  // **NOTE:** All service configuration rules follow "last one wins" order.
  repeated HttpRule rules = 1;

  // When set to true, URL path parameters will be fully URI-decoded except in
  // cases of single segment matches in reserved expansion, where "%2F" will be
  // left encoded.
  //
  // The default behavior is to not decode RFC 6570 reserved characters in multi
  // segment matches.
  bool fully_decode_reserved_expansion = 2;
}
// gRPC Transcoding
//
// gRPC Transcoding is a feature for mapping between a gRPC method and one or
// more HTTP REST endpoints. It allows developers to build a single API service
// that supports both gRPC APIs and REST APIs. Many systems, including [Google
// APIs](https://github.com/googleapis/googleapis),
// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
// Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
// and use it for large scale production services.
//
// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
// how different portions of the gRPC request message are mapped to the URL
// path, URL query parameters, and HTTP request body. It also controls how the
// gRPC response message is mapped to the HTTP response body. `HttpRule` is
// typically specified as an `google.api.http` annotation on the gRPC method.
//
// Each mapping specifies a URL path template and an HTTP method. The path
// template may refer to one or more fields in the gRPC request message, as long
// as each field is a non-repeated field with a primitive (non-message) type.
// The path template controls how fields of the request message are mapped to
// the URL path.
//
// Example:
//
// service Messaging {
// rpc GetMessage(GetMessageRequest) returns (Message) {
// option (google.api.http) = {
// get: "/v1/{name=messages/*}"
// };
// }
// }
// message GetMessageRequest {
// string name = 1; // Mapped to URL path.
// }
// message Message {
// string text = 1; // The resource content.
// }
//
// This enables an HTTP REST to gRPC mapping as below:
//
// - HTTP: `GET /v1/messages/123456`
// - gRPC: `GetMessage(name: "messages/123456")`
//
// Any fields in the request message which are not bound by the path template
// automatically become HTTP query parameters if there is no HTTP request body.
// For example:
//
// service Messaging {
// rpc GetMessage(GetMessageRequest) returns (Message) {
// option (google.api.http) = {
// get:"/v1/messages/{message_id}"
// };
// }
// }
// message GetMessageRequest {
// message SubMessage {
// string subfield = 1;
// }
// string message_id = 1; // Mapped to URL path.
// int64 revision = 2; // Mapped to URL query parameter `revision`.
// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`.
// }
//
// This enables an HTTP JSON to RPC mapping as below:
//
// - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo`
// - gRPC: `GetMessage(message_id: "123456" revision: 2 sub:
// SubMessage(subfield: "foo"))`
//
// Note that fields which are mapped to URL query parameters must have a
// primitive type or a repeated primitive type or a non-repeated message type.
// In the case of a repeated type, the parameter can be repeated in the URL
// as `...?param=A&param=B`. In the case of a message type, each field of the
// message is mapped to a separate parameter, such as
// `...?foo.a=A&foo.b=B&foo.c=C`.
//
// For HTTP methods that allow a request body, the `body` field
// specifies the mapping. Consider a REST update method on the
// message resource collection:
//
// service Messaging {
// rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
// option (google.api.http) = {
// patch: "/v1/messages/{message_id}"
// body: "message"
// };
// }
// }
// message UpdateMessageRequest {
// string message_id = 1; // mapped to the URL
// Message message = 2; // mapped to the body
// }
//
// The following HTTP JSON to RPC mapping is enabled, where the
// representation of the JSON in the request body is determined by
// protos JSON encoding:
//
// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }`
// - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" })`
//
// The special name `*` can be used in the body mapping to define that
// every field not bound by the path template should be mapped to the
// request body. This enables the following alternative definition of
// the update method:
//
// service Messaging {
// rpc UpdateMessage(Message) returns (Message) {
// option (google.api.http) = {
// patch: "/v1/messages/{message_id}"
// body: "*"
// };
// }
// }
// message Message {
// string message_id = 1;
// string text = 2;
// }
//
//
// The following HTTP JSON to RPC mapping is enabled:
//
// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }`
// - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")`
//
// Note that when using `*` in the body mapping, it is not possible to
// have HTTP parameters, as all fields not bound by the path end in
// the body. This makes this option more rarely used in practice when
// defining REST APIs. The common usage of `*` is in custom methods
// which don't use the URL at all for transferring data.
//
// It is possible to define multiple HTTP methods for one RPC by using
// the `additional_bindings` option. Example:
//
// service Messaging {
// rpc GetMessage(GetMessageRequest) returns (Message) {
// option (google.api.http) = {
// get: "/v1/messages/{message_id}"
// additional_bindings {
// get: "/v1/users/{user_id}/messages/{message_id}"
// }
// };
// }
// }
// message GetMessageRequest {
// string message_id = 1;
// string user_id = 2;
// }
//
// This enables the following two alternative HTTP JSON to RPC mappings:
//
// - HTTP: `GET /v1/messages/123456`
// - gRPC: `GetMessage(message_id: "123456")`
//
// - HTTP: `GET /v1/users/me/messages/123456`
// - gRPC: `GetMessage(user_id: "me" message_id: "123456")`
//
// Rules for HTTP mapping
//
// 1. Leaf request fields (recursively expanded nested messages in the request
// message) are classified into three categories:
// - Fields referred by the path template. They are passed via the URL path.
// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They
// are passed via the HTTP
// request body.
// - All other fields are passed via the URL query parameters, and the
// parameter name is the field path in the request message. A repeated
// field can be represented as multiple query parameters under the same
// name.
// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL
// query parameter, all fields
// are passed via URL path and HTTP request body.
// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP
// request body, all
// fields are passed via URL path and URL query parameters.
//
// Path template syntax
//
// Template = "/" Segments [ Verb ] ;
// Segments = Segment { "/" Segment } ;
// Segment = "*" | "**" | LITERAL | Variable ;
// Variable = "{" FieldPath [ "=" Segments ] "}" ;
// FieldPath = IDENT { "." IDENT } ;
// Verb = ":" LITERAL ;
//
// The syntax `*` matches a single URL path segment. The syntax `**` matches
// zero or more URL path segments, which must be the last part of the URL path
// except the `Verb`.
//
// The syntax `Variable` matches part of the URL path as specified by its
// template. A variable template must not contain other variables. If a variable
// matches a single path segment, its template may be omitted, e.g. `{var}`
// is equivalent to `{var=*}`.
//
// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL`
// contains any reserved character, such characters should be percent-encoded
// before the matching.
//
// If a variable contains exactly one path segment, such as `"{var}"` or
// `"{var=*}"`, when such a variable is expanded into a URL path on the client
// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The
// server side does the reverse decoding. Such variables show up in the
// [Discovery
// Document](https://developers.google.com/discovery/v1/reference/apis) as
// `{var}`.
//
// If a variable contains multiple path segments, such as `"{var=foo/*}"`
// or `"{var=**}"`, when such a variable is expanded into a URL path on the
// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded.
// The server side does the reverse decoding, except "%2F" and "%2f" are left
// unchanged. Such variables show up in the
// [Discovery
// Document](https://developers.google.com/discovery/v1/reference/apis) as
// `{+var}`.
//
// Using gRPC API Service Configuration
//
// gRPC API Service Configuration (service config) is a configuration language
// for configuring a gRPC service to become a user-facing product. The
// service config is simply the YAML representation of the `google.api.Service`
// proto message.
//
// As an alternative to annotating your proto file, you can configure gRPC
// transcoding in your service config YAML files. You do this by specifying a
// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same
// effect as the proto annotation. This can be particularly useful if you
// have a proto that is reused in multiple services. Note that any transcoding
// specified in the service config will override any matching transcoding
// configuration in the proto.
//
// The following example selects a gRPC method and applies an `HttpRule` to it:
//
// http:
// rules:
// - selector: example.v1.Messaging.GetMessage
// get: /v1/messages/{message_id}/{sub.subfield}
//
// Special notes
//
// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the
// proto to JSON conversion must follow the [proto3
// specification](https://developers.google.com/protocol-buffers/docs/proto3#json).
//
// While the single segment variable follows the semantics of
// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
// Expansion, the multi segment variable **does not** follow RFC 6570 Section
// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion
// does not expand special characters like `?` and `#`, which would lead
// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding
// for multi segment variables.
//
// The path variables **must not** refer to any repeated or mapped field,
// because client libraries are not capable of handling such variable expansion.
//
// The path variables **must not** capture the leading "/" character. The reason
// is that the most common use case "{var}" does not capture the leading "/"
// character. For consistency, all path variables must share the same behavior.
//
// Repeated message fields must not be mapped to URL query parameters, because
// no client library can support such complicated mapping.
//
// If an API needs to use a JSON array for request or response body, it can map
// the request or response body to a repeated field. However, some gRPC
// Transcoding implementations may not support this feature.
message HttpRule {
  // Selects a method to which this rule applies.
  //
  // Refer to [selector][google.api.DocumentationRule.selector] for syntax
  // details.
  string selector = 1;

  // Determines the URL pattern matched by this rule. The pattern can be
  // used with any of the {get|put|post|delete|patch} methods. A custom method
  // can be defined using the 'custom' field.
  oneof pattern {
    // Maps to HTTP GET. Used for listing and getting information about
    // resources.
    string get = 2;

    // Maps to HTTP PUT. Used for replacing a resource.
    string put = 3;

    // Maps to HTTP POST. Used for creating a resource or performing an action.
    string post = 4;

    // Maps to HTTP DELETE. Used for deleting a resource.
    string delete = 5;

    // Maps to HTTP PATCH. Used for updating a resource.
    string patch = 6;

    // The custom pattern is used for specifying an HTTP method that is not
    // included in the `pattern` field, such as HEAD, or "*" to leave the
    // HTTP method unspecified for this rule. The wild-card rule is useful
    // for services that provide content to Web (HTML) clients.
    CustomHttpPattern custom = 8;
  }

  // The name of the request field whose value is mapped to the HTTP request
  // body, or `*` for mapping all request fields not captured by the path
  // pattern to the HTTP body, or omitted for not having any HTTP request body.
  //
  // NOTE: the referred field must be present at the top-level of the request
  // message type.
  string body = 7;

  // Optional. The name of the response field whose value is mapped to the HTTP
  // response body. When omitted, the entire response message will be used
  // as the HTTP response body.
  //
  // NOTE: The referred field must be present at the top-level of the response
  // message type.
  string response_body = 12;

  // Additional HTTP bindings for the selector. Nested bindings must
  // not contain an `additional_bindings` field themselves (that is,
  // the nesting may only be one level deep).
  repeated HttpRule additional_bindings = 11;
}
// A custom pattern is used for defining a custom HTTP verb
// (see HttpRule.custom).
message CustomHttpPattern {
  // The name of this custom HTTP verb.
  string kind = 1;

  // The path matched by this custom verb.
  string path = 2;
}

View File

@ -0,0 +1,162 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package google.protobuf;
option go_package = "google.golang.org/protobuf/types/known/anypb";
option java_package = "com.google.protobuf";
option java_outer_classname = "AnyProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
// `Any` contains an arbitrary serialized protocol buffer message along with a
// URL that describes the type of the serialized message.
//
// Protobuf library provides support to pack/unpack Any values in the form
// of utility functions or additional generated methods of the Any type.
//
// Example 1: Pack and unpack a message in C++.
//
// Foo foo = ...;
// Any any;
// any.PackFrom(foo);
// ...
// if (any.UnpackTo(&foo)) {
// ...
// }
//
// Example 2: Pack and unpack a message in Java.
//
// Foo foo = ...;
// Any any = Any.pack(foo);
// ...
// if (any.is(Foo.class)) {
// foo = any.unpack(Foo.class);
// }
// // or ...
// if (any.isSameTypeAs(Foo.getDefaultInstance())) {
// foo = any.unpack(Foo.getDefaultInstance());
// }
//
// Example 3: Pack and unpack a message in Python.
//
// foo = Foo(...)
// any = Any()
// any.Pack(foo)
// ...
// if any.Is(Foo.DESCRIPTOR):
// any.Unpack(foo)
// ...
//
// Example 4: Pack and unpack a message in Go
//
// foo := &pb.Foo{...}
// any, err := anypb.New(foo)
// if err != nil {
// ...
// }
// ...
// foo := &pb.Foo{}
// if err := any.UnmarshalTo(foo); err != nil {
// ...
// }
//
// The pack methods provided by protobuf library will by default use
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
// methods only use the fully qualified type name after the last '/'
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
// name "y.z".
//
// JSON
// ====
// The JSON representation of an `Any` value uses the regular
// representation of the deserialized, embedded message, with an
// additional field `@type` which contains the type URL. Example:
//
// package google.profile;
// message Person {
// string first_name = 1;
// string last_name = 2;
// }
//
// {
// "@type": "type.googleapis.com/google.profile.Person",
// "firstName": <string>,
// "lastName": <string>
// }
//
// If the embedded message type is well-known and has a custom JSON
// representation, that representation will be embedded adding a field
// `value` which holds the custom JSON in addition to the `@type`
// field. Example (for message [google.protobuf.Duration][]):
//
// {
// "@type": "type.googleapis.com/google.protobuf.Duration",
// "value": "1.212s"
// }
//
// NOTE(review): vendored verbatim from the protobuf well-known types; keep
// byte-identical to upstream google/protobuf/any.proto when updating.
message Any {
  // A URL/resource name that uniquely identifies the type of the serialized
  // protocol buffer message. This string must contain at least
  // one "/" character. The last segment of the URL's path must represent
  // the fully qualified name of the type (as in
  // `path/google.protobuf.Duration`). The name should be in a canonical form
  // (e.g., leading "." is not accepted).
  //
  // In practice, teams usually precompile into the binary all types that they
  // expect it to use in the context of Any. However, for URLs which use the
  // scheme `http`, `https`, or no scheme, one can optionally set up a type
  // server that maps type URLs to message definitions as follows:
  //
  // * If no scheme is provided, `https` is assumed.
  // * An HTTP GET on the URL must yield a [google.protobuf.Type][]
  //   value in binary format, or produce an error.
  // * Applications are allowed to cache lookup results based on the
  //   URL, or have them precompiled into a binary to avoid any
  //   lookup. Therefore, binary compatibility needs to be preserved
  //   on changes to types. (Use versioned type names to manage
  //   breaking changes.)
  //
  // Note: this functionality is not currently available in the official
  // protobuf release, and it is not used for type URLs beginning with
  // type.googleapis.com. As of May 2023, there are no widely used type server
  // implementations and no plans to implement one.
  //
  // Schemes other than `http`, `https` (or the empty scheme) might be
  // used with implementation specific semantics.
  //
  string type_url = 1;

  // Must be a valid serialized protocol buffer of the above specified type.
  bytes value = 2;
}

View File

@ -0,0 +1,921 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package google.protobuf;
option go_package = "google.golang.org/protobuf/types/descriptorpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;
// descriptor.proto must be optimized for speed because reflection-based
// algorithms don't work during bootstrapping.
option optimize_for = SPEED;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
  repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
  optional string name = 1;     // file name, relative to root of source tree
  optional string package = 2;  // e.g. "foo", "foo.bar", etc.

  // Names of files imported by this file.
  repeated string dependency = 3;
  // Indexes of the public imported files in the dependency list above.
  repeated int32 public_dependency = 10;
  // Indexes of the weak imported files in the dependency list.
  // For Google-internal migration only. Do not use.
  repeated int32 weak_dependency = 11;

  // All top-level definitions in this file.
  repeated DescriptorProto message_type = 4;
  repeated EnumDescriptorProto enum_type = 5;
  repeated ServiceDescriptorProto service = 6;
  repeated FieldDescriptorProto extension = 7;

  optional FileOptions options = 8;

  // This field contains optional information about the original source code.
  // You may safely remove this entire field without harming runtime
  // functionality of the descriptors -- the information is needed only by
  // development tools.
  optional SourceCodeInfo source_code_info = 9;

  // The syntax of the proto file.
  // The supported values are "proto2" and "proto3".
  optional string syntax = 12;
}
// Describes a message type.
message DescriptorProto {
optional string name = 1;
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
// fields or extension ranges in the same message. Reserved ranges may
// not overlap.
message ReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10;
}
message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
// treat group fields as unknown fields.
TYPE_GROUP = 10;
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
}
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
}
optional string name = 1;
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6;
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2;
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
optional string default_value = 7;
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof.
optional int32 oneof_index = 9;
// JSON name of this field. The value is set by protocol compiler. If the
// user has set a "json_name" option on this field, that option's value
// will be used. Otherwise, it's deduced from the field's name by converting
// it to camelCase.
optional string json_name = 10;
optional FieldOptions options = 8;
// If true, this is a proto3 "optional". When a proto3 field is optional, it
// tracks presence regardless of field type.
//
// When proto3_optional is true, this field must be belong to a oneof to
// signal to old proto3 clients that presence is tracked for this field. This
// oneof is known as a "synthetic" oneof, and this field must be its sole
// member (each proto3 optional field gets its own synthetic oneof). Synthetic
// oneofs exist in the descriptor only, and do not generate any API. Synthetic
// oneofs must be ordered after all "real" oneofs.
//
// For message fields, proto3_optional doesn't create any semantic change,
// since non-repeated message fields always track presence. However it still
// indicates the semantic detail of whether the user wrote "optional" or not.
// This can be useful for round-tripping the .proto file. For consistency we
// give message fields a synthetic oneof also, even though it is not required
// to track presence. This is especially important because the parser can't
// tell if a field is a message or an enum, so it must always create a
// synthetic oneof.
//
// Proto2 optional fields do not set this flag, because they already indicate
// optional with `LABEL_OPTIONAL`.
optional bool proto3_optional = 17;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1;
optional OneofOptions options = 2;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1;
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
// entries in the same enum. Reserved ranges may not overlap.
//
// Note that this is distinct from DescriptorProto.ReservedRange in that it
// is inclusive such that it can appropriately represent the entire int32
// domain.
message EnumReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Inclusive.
}
// Range of reserved numeric values. Reserved numeric values may not be used
// by enum values in the same enum declaration. Reserved ranges may not
// overlap.
repeated EnumReservedRange reserved_range = 4;
// Reserved enum value names, which may not be reused. A given name may only
// be reserved once.
repeated string reserved_name = 5;
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1;
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2;
optional string output_type = 3;
optional MethodOptions options = 4;
// Identifies if client streams multiple client messages
optional bool client_streaming = 5 [default = false];
// Identifies if server streams multiple server messages
optional bool server_streaming = 6 [default = false];
}
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail protobuf-global-extension-registry@google.com
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no
// need to explain how you intend to use them. Usually you only need one
// extension number. You can declare multiple options with only one extension
// number by putting them in a sub-message. See the Custom Options section of
// the docs for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1;
// Controls the name of the wrapper Java class generated for the .proto file.
// That class will always contain the .proto file's getDescriptor() method as
// well as any top-level extensions defined in the .proto file.
// If java_multiple_files is disabled, then all the other classes from the
// .proto file will be nested inside the single wrapper outer class.
optional string java_outer_classname = 8;
// If enabled, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the wrapper class
// named by java_outer_classname. However, the wrapper class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default = false];
// This option does nothing.
optional bool java_generate_equals_and_hash = 20 [deprecated=true];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
// This option has no effect on when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default = SPEED];
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default = false];
optional bool java_generic_services = 17 [default = false];
optional bool py_generic_services = 18 [default = false];
optional bool php_generic_services = 42 [default = false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; in the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default = false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the objective c class prefix which is prepended to all objective c
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
// Namespace for generated classes; defaults to the package.
optional string csharp_namespace = 37;
// By default Swift generators will take the proto package and CamelCase it
// replacing '.' with underscore and use that to prefix the types/symbols
// defined. When this options is provided, they will use this value instead
// to prefix the types/symbols defined.
optional string swift_prefix = 39;
// Sets the php class prefix which is prepended to all php generated classes
// from this .proto. Default is empty.
optional string php_class_prefix = 40;
// Use this option to change the namespace of php generated classes. Default
// is empty. When this option is empty, the package name will be used for
// determining the namespace.
optional string php_namespace = 41;
// Use this option to change the namespace of php generated metadata classes.
// Default is empty. When this option is empty, the proto file name will be
// used for determining the namespace.
optional string php_metadata_namespace = 44;
// Use this option to change the package of ruby generated classes. Default
// is empty. When this option is not set, the package name will be used for
// determining the ruby package.
optional string ruby_package = 45;
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message.
// See the documentation for the "Options" section above.
extensions 1000 to max;
reserved 38;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default = false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default = false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; in the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default = false];
reserved 4, 5, 6;
// Whether the message is an automatically generated map entry type for the
// maps field.
//
// For maps fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
// The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob. In proto3, only explicit setting it to
// false will avoid using packed encoding.
optional bool packed = 2;
// The jstype option determines the JavaScript type used for values of the
// field. The option is permitted only for 64 bit integral and fixed types
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
// is represented as JavaScript string, which avoids loss of precision that
// can happen when a large value is converted to a floating point JavaScript.
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
// use the JavaScript "number" type. The behavior of the default option
// JS_NORMAL is implementation dependent.
//
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
// Use JavaScript strings.
JS_STRING = 1;
// Use JavaScript numbers.
JS_NUMBER = 2;
}
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
//
// As of 2021, lazy does no correctness checks on the byte stream during
// parsing. This may lead to crashes if and when an invalid byte stream is
// finally parsed upon access.
//
// TODO(b/211906113): Enable validation on lazy fields.
optional bool lazy = 5 [default = false];
// unverified_lazy does no correctness checks on the byte stream. This should
// only be used where lazy with verification is prohibitive for performance
// reasons.
optional bool unverified_lazy = 15 [default = false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; in the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default = false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 4; // removed jtype
}
message OneofOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; in the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default = false];
reserved 5; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; in the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; in the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; in the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default = false];
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
// or neither? HTTP based RPC implementation may choose GET verb for safe
// methods, and PUT verb for idempotent methods instead of the default POST.
enum IdempotencyLevel {
IDEMPOTENCY_UNKNOWN = 0;
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level = 34
[default = IDEMPOTENCY_UNKNOWN];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing a option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents
// "foo.(bar.baz).moo".
message NamePart {
required string name_part = 1;
required bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3;
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8;
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1;
// }
// Let's look at just the field definition:
// optional string foo = 1;
// ^ ^^ ^^ ^ ^^^
// a bc de f ghi
// We have the following locations:
// span path represents
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements are
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
// does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
// the root FileDescriptorProto to the place where the definition occurs.
// For example, this path:
// [ 4, 3, 2, 7, 1 ]
// refers to:
// file.message_type(3) // 4, 3
// .field(7) // 2, 7
// .name() // 1
// This is because FileDescriptorProto.message_type has field number 4:
// repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
// repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
// optional string name = 1;
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
// [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed = true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed = true];
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// leading_detached_comments will keep paragraphs of comments that appear
// before (but not connected to) the current element. Each paragraph,
// separated by empty lines, will be one comment element in the repeated
// field.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
// optional int32 foo = 1; // Comment attached to foo.
// // Comment attached to bar.
// optional int32 bar = 2;
//
// optional string baz = 3;
// // Comment attached to baz.
// // Another line attached to baz.
//
// // Comment attached to moo.
// //
// // Another line attached to moo.
// optional double moo = 4;
//
// // Detached comment for corge. This is not leading or trailing comments
// // to moo or corge because there are blank lines separating it from
// // both.
//
// // Detached comment for corge paragraph 2.
//
// optional string corge = 5;
// /* Block comment attached
// * to corge. Leading asterisks
// * will be removed. */
// /* Block comment attached to
// * grault. */
// optional int32 grault = 6;
//
// // ignored detached comments.
optional string leading_comments = 3;
optional string trailing_comments = 4;
repeated string leading_detached_comments = 6;
}
}
// NOTE(review): this message appears vendored from upstream
// google/protobuf/descriptor.proto — keep byte-compatible with upstream
// rather than editing locally.
//
// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
  // An Annotation connects some span of text in generated code to an element
  // of its generating .proto file.
  repeated Annotation annotation = 1;
  message Annotation {
    // Identifies the element in the original source .proto file. This field
    // is formatted the same as SourceCodeInfo.Location.path.
    repeated int32 path = 1 [packed = true];
    // Identifies the filesystem path to the original source .proto.
    optional string source_file = 2;
    // Identifies the starting offset in bytes in the generated code
    // that relates to the identified object.
    optional int32 begin = 3;
    // Identifies the ending offset in bytes in the generated code that
    // relates to the identified offset. The end offset should be one past
    // the last relevant byte (so the length of the text = end - begin).
    optional int32 end = 4;
  }
}

View File

@ -0,0 +1,35 @@
syntax = "proto3";
package laconic.types.v1;
import "amino/amino.proto";
import "cosmos_proto/cosmos.proto";
import "cosmos/auth/v1beta1/auth.proto";
import "gogoproto/gogo.proto";
option go_package = "git.vdb.to/LaconicNetwork/zenithd/types/v1";
// ZenithAccount defines an account storing Azimuth identity
message ZenithAccount {
  option (amino.name) = "zenith/ZenithAccount";
  option (amino.message_encoding) = "zenith_account";
  option (gogoproto.goproto_getters) = false;
  option (gogoproto.equal) = false;
  option (cosmos_proto.implements_interface) =
      "laconic.types.v1.ZenithAccountI";
  // Embedded standard Cosmos SDK account (address, pubkey, number, sequence).
  cosmos.auth.v1beta1.BaseAccount base_account = 1 [ (gogoproto.embed) = true ];
  // Azimuth identity bound to this account — NOTE(review): presumably the
  // Urbit point identifier; confirm the expected string format.
  string azimuth_id = 2;
  // Points (galaxy and stars) owned by this account; non-nullable in Go.
  AccountOwnedPoints owned_points = 3 [ (gogoproto.nullable) = false ];
}
// AccountOwnedPoints defines the set of owned points (galaxy and star) for a
// zenith account
message AccountOwnedPoints {
  option (gogoproto.goproto_getters) = false;
  option (gogoproto.equal) = false;
  // Numeric ID of the owned galaxy point.
  int64 galaxy = 1;
  // Numeric IDs of the owned star points.
  repeated int64 stars = 2;
}

View File

@ -0,0 +1,53 @@
syntax = "proto3";
package laconic.zenith.v1;
option go_package = "git.vdb.to/LaconicNetwork/zenithd/x/zenith";
import "gogoproto/gogo.proto";
// OwnerChange defines an owner changed event
message OwnerChange {
  // Point whose ownership changed.
  uint32 point = 1;
  // New owner of the point — NOTE(review): presumably an address string;
  // confirm against the emitter of these events.
  string owner = 2;
}
// List of owner change events
// Value type to be used in OwnerChangesCache
message OwnerChangesList {
  repeated OwnerChange value = 1 [ (gogoproto.nullable) = false ];
}
// StalledEthHeightSlashingInfo consists of information required to slash
// validators progressively when the eth height update is stalled
message StalledEthHeightSlashingInfo {
  // Height at which the stall window began — TODO confirm whether this is an
  // eth height or a local chain height.
  int64 start_height = 1;
  // Per-validator voting power snapshot — NOTE(review): keys look like
  // validator identifiers; verify against the writer of this map.
  map<string, int64> validator_power_map = 2;
}
// Attestation represents the attestation payload signed by validators in gentx
message Attestation {
  // The signed payload.
  Payload payload = 1;
  // Signatures over the payload, one per signer.
  repeated string signatures = 2;
}
// Payload is the outer payload having Azimuth ID (address); signed with zenithd
// account
message Payload {
  string address = 1;
  string msg = 2;
  InnerPayload payload = 3;
}
// InnerPayload is the inner payload having zenith address; signed with the
// Ethereum account
message InnerPayload {
  string address = 1;
  string msg = 2;
  OwnedPoints owned_points = 3;
}
// OwnedPoints is the set of `@p` names of owned galaxy and star
message OwnedPoints {
  string galaxy = 1;
  repeated string stars = 2;
}

20
scripts/protocgen.sh Executable file
View File

@ -0,0 +1,20 @@
#!/bin/bash
#
# Generates TypeScript bindings for the repository's proto files using protoc
# and the ts-proto plugin (node_modules/.bin/protoc-gen-ts_proto).
#
# NOTE: protoc is required, and `yarn` must have been run so the plugin exists.
set -euo pipefail

REPO_ROOT=$(pwd)
PROTO_DIR="$REPO_ROOT/proto"
DEST_TS="$REPO_ROOT/src/proto/"

echo "Generating protobuf files"

mkdir -p "$DEST_TS"

# Collect the .proto files with NUL delimiters so paths containing whitespace
# survive (the previous unquoted $(find ...) relied on word splitting).
PROTO_FILES=()
while IFS= read -r -d '' proto_file; do
  PROTO_FILES+=("$proto_file")
done < <(find "$PROTO_DIR/laconic" -type f -iname "*.proto" -print0)

# protoc errors cryptically when given no inputs; fail with a clear message.
if [ "${#PROTO_FILES[@]}" -eq 0 ]; then
  echo "No .proto files found under $PROTO_DIR/laconic" >&2
  exit 1
fi

protoc \
  --plugin="$REPO_ROOT/node_modules/.bin/protoc-gen-ts_proto" \
  --ts_proto_out="$DEST_TS" \
  --proto_path="$PROTO_DIR" \
  --ts_proto_opt="esModuleInterop=true,forceLong=long,useOptionals=messages" \
  "${PROTO_FILES[@]}"

9
src/proto/amino/amino.ts Normal file
View File

@ -0,0 +1,9 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: amino/amino.proto
/* eslint-disable */

// NOTE(review): amino/amino.proto declares only custom options/extensions, so
// ts-proto emits no message types here — just the package name constant.
export const protobufPackage = "amino";

View File

@ -0,0 +1,534 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: cosmos/auth/v1beta1/auth.proto
/* eslint-disable */
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
import Long from "long";
import { Any } from "../../../google/protobuf/any";
/** Fully-qualified protobuf package of cosmos/auth/v1beta1/auth.proto. */
export const protobufPackage = "cosmos.auth.v1beta1";

/**
 * BaseAccount defines a base account type. It contains all the necessary fields
 * for basic account functionality. Any custom account type should extend this
 * type for additional functionality (e.g. vesting).
 */
export interface BaseAccount {
  /** Account address (presumably bech32-encoded — confirm against chain config). */
  address: string;
  /** Account public key wrapped in a protobuf Any; undefined when not set. */
  pubKey?: Any | undefined;
  /** Account number (uint64 on the wire, hence Long). */
  accountNumber: Long;
  /** Transaction sequence (uint64 on the wire, hence Long). */
  sequence: Long;
}

/** ModuleAccount defines an account for modules that holds coins on a pool. */
export interface ModuleAccount {
  /** Underlying base account; undefined when absent on the wire. */
  baseAccount?: BaseAccount | undefined;
  /** Module name. */
  name: string;
  /** Module permissions — NOTE(review): semantics defined by the auth keeper; confirm. */
  permissions: string[];
}

/** ModuleCredential represents a unclaimable pubkey for base accounts controlled by modules. */
export interface ModuleCredential {
  /** module_name is the name of the module used for address derivation (passed into address.Module). */
  moduleName: string;
  /**
   * derivation_keys is for deriving a module account address (passed into address.Module)
   * adding more keys creates sub-account addresses (passed into address.Derive)
   */
  derivationKeys: Uint8Array[];
}

/** Params defines the parameters for the auth module. */
export interface Params {
  // All five parameters are uint64 on the wire (see the Params codec below).
  maxMemoCharacters: Long;
  txSigLimit: Long;
  txSizeCostPerByte: Long;
  sigVerifyCostEd25519: Long;
  sigVerifyCostSecp256k1: Long;
}
/** Returns a BaseAccount with every field set to its protobuf default value. */
function createBaseBaseAccount(): BaseAccount {
  return { address: "", pubKey: undefined, accountNumber: Long.UZERO, sequence: Long.UZERO };
}

/**
 * Binary (protobuf wire format) and JSON codec for BaseAccount.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing.
 */
export const BaseAccount: MessageFns<BaseAccount> = {
  encode(message: BaseAccount, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    // Fields equal to their proto3 default are omitted from the wire.
    if (message.address !== "") {
      writer.uint32(10).string(message.address);
    }
    if (message.pubKey !== undefined) {
      Any.encode(message.pubKey, writer.uint32(18).fork()).join();
    }
    if (!message.accountNumber.equals(Long.UZERO)) {
      writer.uint32(24).uint64(message.accountNumber.toString());
    }
    if (!message.sequence.equals(Long.UZERO)) {
      writer.uint32(32).uint64(message.sequence.toString());
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): BaseAccount {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseBaseAccount();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.address = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.pubKey = Any.decode(reader, reader.uint32());
          continue;
        }
        case 3: {
          if (tag !== 24) {
            break;
          }
          message.accountNumber = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
        case 4: {
          if (tag !== 32) {
            break;
          }
          message.sequence = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): BaseAccount {
    return {
      address: isSet(object.address) ? globalThis.String(object.address) : "",
      pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined,
      accountNumber: isSet(object.accountNumber) ? Long.fromValue(object.accountNumber) : Long.UZERO,
      sequence: isSet(object.sequence) ? Long.fromValue(object.sequence) : Long.UZERO,
    };
  },
  toJSON(message: BaseAccount): unknown {
    const obj: any = {};
    if (message.address !== "") {
      obj.address = message.address;
    }
    if (message.pubKey !== undefined) {
      obj.pubKey = Any.toJSON(message.pubKey);
    }
    if (!message.accountNumber.equals(Long.UZERO)) {
      obj.accountNumber = (message.accountNumber || Long.UZERO).toString();
    }
    if (!message.sequence.equals(Long.UZERO)) {
      obj.sequence = (message.sequence || Long.UZERO).toString();
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<BaseAccount>, I>>(base?: I): BaseAccount {
    return BaseAccount.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<BaseAccount>, I>>(object: I): BaseAccount {
    const message = createBaseBaseAccount();
    message.address = object.address ?? "";
    message.pubKey = (object.pubKey !== undefined && object.pubKey !== null)
      ? Any.fromPartial(object.pubKey)
      : undefined;
    message.accountNumber = (object.accountNumber !== undefined && object.accountNumber !== null)
      ? Long.fromValue(object.accountNumber)
      : Long.UZERO;
    message.sequence = (object.sequence !== undefined && object.sequence !== null)
      ? Long.fromValue(object.sequence)
      : Long.UZERO;
    return message;
  },
};
/** Returns a ModuleAccount with every field set to its protobuf default value. */
function createBaseModuleAccount(): ModuleAccount {
  return { baseAccount: undefined, name: "", permissions: [] };
}

/**
 * Binary (protobuf wire format) and JSON codec for ModuleAccount.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing.
 */
export const ModuleAccount: MessageFns<ModuleAccount> = {
  encode(message: ModuleAccount, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.baseAccount !== undefined) {
      BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).join();
    }
    if (message.name !== "") {
      writer.uint32(18).string(message.name);
    }
    for (const v of message.permissions) {
      writer.uint32(26).string(v!);
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): ModuleAccount {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseModuleAccount();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.baseAccount = BaseAccount.decode(reader, reader.uint32());
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.name = reader.string();
          continue;
        }
        case 3: {
          if (tag !== 26) {
            break;
          }
          message.permissions.push(reader.string());
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): ModuleAccount {
    return {
      baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined,
      name: isSet(object.name) ? globalThis.String(object.name) : "",
      permissions: globalThis.Array.isArray(object?.permissions)
        ? object.permissions.map((e: any) => globalThis.String(e))
        : [],
    };
  },
  toJSON(message: ModuleAccount): unknown {
    const obj: any = {};
    if (message.baseAccount !== undefined) {
      obj.baseAccount = BaseAccount.toJSON(message.baseAccount);
    }
    if (message.name !== "") {
      obj.name = message.name;
    }
    if (message.permissions?.length) {
      obj.permissions = message.permissions;
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<ModuleAccount>, I>>(base?: I): ModuleAccount {
    return ModuleAccount.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<ModuleAccount>, I>>(object: I): ModuleAccount {
    const message = createBaseModuleAccount();
    message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null)
      ? BaseAccount.fromPartial(object.baseAccount)
      : undefined;
    message.name = object.name ?? "";
    message.permissions = object.permissions?.map((e) => e) || [];
    return message;
  },
};
/** Returns a ModuleCredential with every field set to its protobuf default value. */
function createBaseModuleCredential(): ModuleCredential {
  return { moduleName: "", derivationKeys: [] };
}

/**
 * Binary (protobuf wire format) and JSON codec for ModuleCredential.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing. JSON encodes the byte arrays as base64.
 */
export const ModuleCredential: MessageFns<ModuleCredential> = {
  encode(message: ModuleCredential, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.moduleName !== "") {
      writer.uint32(10).string(message.moduleName);
    }
    for (const v of message.derivationKeys) {
      writer.uint32(18).bytes(v!);
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): ModuleCredential {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseModuleCredential();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.moduleName = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.derivationKeys.push(reader.bytes());
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): ModuleCredential {
    return {
      moduleName: isSet(object.moduleName) ? globalThis.String(object.moduleName) : "",
      derivationKeys: globalThis.Array.isArray(object?.derivationKeys)
        ? object.derivationKeys.map((e: any) => bytesFromBase64(e))
        : [],
    };
  },
  toJSON(message: ModuleCredential): unknown {
    const obj: any = {};
    if (message.moduleName !== "") {
      obj.moduleName = message.moduleName;
    }
    if (message.derivationKeys?.length) {
      obj.derivationKeys = message.derivationKeys.map((e) => base64FromBytes(e));
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<ModuleCredential>, I>>(base?: I): ModuleCredential {
    return ModuleCredential.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<ModuleCredential>, I>>(object: I): ModuleCredential {
    const message = createBaseModuleCredential();
    message.moduleName = object.moduleName ?? "";
    message.derivationKeys = object.derivationKeys?.map((e) => e) || [];
    return message;
  },
};
/** Returns a Params with every field set to its protobuf default value. */
function createBaseParams(): Params {
  return {
    maxMemoCharacters: Long.UZERO,
    txSigLimit: Long.UZERO,
    txSizeCostPerByte: Long.UZERO,
    sigVerifyCostEd25519: Long.UZERO,
    sigVerifyCostSecp256k1: Long.UZERO,
  };
}

/**
 * Binary (protobuf wire format) and JSON codec for Params.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing. All fields are uint64 on the wire; JSON uses
 * decimal strings.
 */
export const Params: MessageFns<Params> = {
  encode(message: Params, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (!message.maxMemoCharacters.equals(Long.UZERO)) {
      writer.uint32(8).uint64(message.maxMemoCharacters.toString());
    }
    if (!message.txSigLimit.equals(Long.UZERO)) {
      writer.uint32(16).uint64(message.txSigLimit.toString());
    }
    if (!message.txSizeCostPerByte.equals(Long.UZERO)) {
      writer.uint32(24).uint64(message.txSizeCostPerByte.toString());
    }
    if (!message.sigVerifyCostEd25519.equals(Long.UZERO)) {
      writer.uint32(32).uint64(message.sigVerifyCostEd25519.toString());
    }
    if (!message.sigVerifyCostSecp256k1.equals(Long.UZERO)) {
      writer.uint32(40).uint64(message.sigVerifyCostSecp256k1.toString());
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): Params {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseParams();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 8) {
            break;
          }
          message.maxMemoCharacters = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
        case 2: {
          if (tag !== 16) {
            break;
          }
          message.txSigLimit = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
        case 3: {
          if (tag !== 24) {
            break;
          }
          message.txSizeCostPerByte = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
        case 4: {
          if (tag !== 32) {
            break;
          }
          message.sigVerifyCostEd25519 = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
        case 5: {
          if (tag !== 40) {
            break;
          }
          message.sigVerifyCostSecp256k1 = Long.fromString(reader.uint64().toString(), true);
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): Params {
    return {
      maxMemoCharacters: isSet(object.maxMemoCharacters) ? Long.fromValue(object.maxMemoCharacters) : Long.UZERO,
      txSigLimit: isSet(object.txSigLimit) ? Long.fromValue(object.txSigLimit) : Long.UZERO,
      txSizeCostPerByte: isSet(object.txSizeCostPerByte) ? Long.fromValue(object.txSizeCostPerByte) : Long.UZERO,
      sigVerifyCostEd25519: isSet(object.sigVerifyCostEd25519)
        ? Long.fromValue(object.sigVerifyCostEd25519)
        : Long.UZERO,
      sigVerifyCostSecp256k1: isSet(object.sigVerifyCostSecp256k1)
        ? Long.fromValue(object.sigVerifyCostSecp256k1)
        : Long.UZERO,
    };
  },
  toJSON(message: Params): unknown {
    const obj: any = {};
    if (!message.maxMemoCharacters.equals(Long.UZERO)) {
      obj.maxMemoCharacters = (message.maxMemoCharacters || Long.UZERO).toString();
    }
    if (!message.txSigLimit.equals(Long.UZERO)) {
      obj.txSigLimit = (message.txSigLimit || Long.UZERO).toString();
    }
    if (!message.txSizeCostPerByte.equals(Long.UZERO)) {
      obj.txSizeCostPerByte = (message.txSizeCostPerByte || Long.UZERO).toString();
    }
    if (!message.sigVerifyCostEd25519.equals(Long.UZERO)) {
      obj.sigVerifyCostEd25519 = (message.sigVerifyCostEd25519 || Long.UZERO).toString();
    }
    if (!message.sigVerifyCostSecp256k1.equals(Long.UZERO)) {
      obj.sigVerifyCostSecp256k1 = (message.sigVerifyCostSecp256k1 || Long.UZERO).toString();
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<Params>, I>>(base?: I): Params {
    return Params.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<Params>, I>>(object: I): Params {
    const message = createBaseParams();
    message.maxMemoCharacters = (object.maxMemoCharacters !== undefined && object.maxMemoCharacters !== null)
      ? Long.fromValue(object.maxMemoCharacters)
      : Long.UZERO;
    message.txSigLimit = (object.txSigLimit !== undefined && object.txSigLimit !== null)
      ? Long.fromValue(object.txSigLimit)
      : Long.UZERO;
    message.txSizeCostPerByte = (object.txSizeCostPerByte !== undefined && object.txSizeCostPerByte !== null)
      ? Long.fromValue(object.txSizeCostPerByte)
      : Long.UZERO;
    message.sigVerifyCostEd25519 = (object.sigVerifyCostEd25519 !== undefined && object.sigVerifyCostEd25519 !== null)
      ? Long.fromValue(object.sigVerifyCostEd25519)
      : Long.UZERO;
    message.sigVerifyCostSecp256k1 =
      (object.sigVerifyCostSecp256k1 !== undefined && object.sigVerifyCostSecp256k1 !== null)
        ? Long.fromValue(object.sigVerifyCostSecp256k1)
        : Long.UZERO;
    return message;
  },
};
/** Decodes a base64 string into raw bytes, preferring Node's Buffer and falling back to atob. */
function bytesFromBase64(b64: string): Uint8Array {
  const BufferCtor = (globalThis as any).Buffer;
  if (BufferCtor) {
    return Uint8Array.from(BufferCtor.from(b64, "base64"));
  }
  const decoded = globalThis.atob(b64);
  const bytes = new Uint8Array(decoded.length);
  for (let i = 0; i < decoded.length; ++i) {
    bytes[i] = decoded.charCodeAt(i);
  }
  return bytes;
}
/** Encodes raw bytes as a base64 string, preferring Node's Buffer and falling back to btoa. */
function base64FromBytes(arr: Uint8Array): string {
  const BufferCtor = (globalThis as any).Buffer;
  if (BufferCtor) {
    return BufferCtor.from(arr).toString("base64");
  }
  let binary = "";
  for (const byte of arr) {
    binary += globalThis.String.fromCharCode(byte);
  }
  return globalThis.btoa(binary);
}
// Primitive/leaf types that DeepPartial leaves untouched.
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;

// Recursive partial type: every property optional, applied through arrays;
// Long-typed values may also be supplied as string | number.
export type DeepPartial<T> = T extends Builtin ? T
  : T extends Long ? string | number | Long : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
  : Partial<T>;

type KeysOfUnion<T> = T extends T ? keyof T : never;
// Exact<P, I> rejects excess properties: keys of I absent from P are typed never.
export type Exact<P, I extends P> = P extends Builtin ? P
  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };

// True when a JSON value is present (neither null nor undefined).
function isSet(value: any): boolean {
  return value !== null && value !== undefined;
}

// Common shape of the generated per-message codec objects in this file.
export interface MessageFns<T> {
  encode(message: T, writer?: BinaryWriter): BinaryWriter;
  decode(input: BinaryReader | Uint8Array, length?: number): T;
  fromJSON(object: any): T;
  toJSON(message: T): unknown;
  create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
  fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}

View File

@ -0,0 +1,308 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: cosmos_proto/cosmos.proto
/* eslint-disable */
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
import Long from "long";
/** Fully-qualified protobuf package of cosmos_proto/cosmos.proto. */
export const protobufPackage = "cosmos_proto";

/** Scalar field types usable with the cosmos_proto scalar option. */
export enum ScalarType {
  SCALAR_TYPE_UNSPECIFIED = 0,
  SCALAR_TYPE_STRING = 1,
  SCALAR_TYPE_BYTES = 2,
  UNRECOGNIZED = -1,
}

/**
 * Converts a JSON value (numeric code or enum name string) into a ScalarType.
 * Anything unrecognised maps to ScalarType.UNRECOGNIZED.
 */
export function scalarTypeFromJSON(object: any): ScalarType {
  if (object === 0 || object === "SCALAR_TYPE_UNSPECIFIED") {
    return ScalarType.SCALAR_TYPE_UNSPECIFIED;
  }
  if (object === 1 || object === "SCALAR_TYPE_STRING") {
    return ScalarType.SCALAR_TYPE_STRING;
  }
  if (object === 2 || object === "SCALAR_TYPE_BYTES") {
    return ScalarType.SCALAR_TYPE_BYTES;
  }
  return ScalarType.UNRECOGNIZED;
}

/**
 * Converts a ScalarType into its canonical enum name string.
 * Unknown values map to "UNRECOGNIZED".
 */
export function scalarTypeToJSON(object: ScalarType): string {
  if (object === ScalarType.SCALAR_TYPE_UNSPECIFIED) {
    return "SCALAR_TYPE_UNSPECIFIED";
  }
  if (object === ScalarType.SCALAR_TYPE_STRING) {
    return "SCALAR_TYPE_STRING";
  }
  if (object === ScalarType.SCALAR_TYPE_BYTES) {
    return "SCALAR_TYPE_BYTES";
  }
  return "UNRECOGNIZED";
}
/**
 * InterfaceDescriptor describes an interface type to be used with
 * accepts_interface and implements_interface and declared by declare_interface.
 */
export interface InterfaceDescriptor {
  /**
   * name is the name of the interface. It should be a short-name (without
   * a period) such that the fully qualified name of the interface will be
   * package.name, ex. for the package a.b and interface named C, the
   * fully-qualified name will be a.b.C.
   */
  name: string;
  /**
   * description is a human-readable description of the interface and its
   * purpose.
   */
  description: string;
}

/**
 * ScalarDescriptor describes an scalar type to be used with
 * the scalar field option and declared by declare_scalar.
 * Scalars extend simple protobuf built-in types with additional
 * syntax and semantics, for instance to represent big integers.
 * Scalars should ideally define an encoding such that there is only one
 * valid syntactical representation for a given semantic meaning,
 * i.e. the encoding should be deterministic.
 */
export interface ScalarDescriptor {
  /**
   * name is the name of the scalar. It should be a short-name (without
   * a period) such that the fully qualified name of the scalar will be
   * package.name, ex. for the package a.b and scalar named C, the
   * fully-qualified name will be a.b.C.
   */
  name: string;
  /**
   * description is a human-readable description of the scalar and its
   * encoding format. For instance a big integer or decimal scalar should
   * specify precisely the expected encoding format.
   */
  description: string;
  /**
   * field_type is the type of field with which this scalar can be used.
   * Scalars can be used with one and only one type of field so that
   * encoding standards and simple and clear. Currently only string and
   * bytes fields are supported for scalars.
   */
  fieldType: ScalarType[];
}

/** Returns an InterfaceDescriptor with every field set to its protobuf default value. */
function createBaseInterfaceDescriptor(): InterfaceDescriptor {
  return { name: "", description: "" };
}

/**
 * Binary (protobuf wire format) and JSON codec for InterfaceDescriptor.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing.
 */
export const InterfaceDescriptor: MessageFns<InterfaceDescriptor> = {
  encode(message: InterfaceDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.name !== "") {
      writer.uint32(10).string(message.name);
    }
    if (message.description !== "") {
      writer.uint32(18).string(message.description);
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): InterfaceDescriptor {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseInterfaceDescriptor();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.name = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.description = reader.string();
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): InterfaceDescriptor {
    return {
      name: isSet(object.name) ? globalThis.String(object.name) : "",
      description: isSet(object.description) ? globalThis.String(object.description) : "",
    };
  },
  toJSON(message: InterfaceDescriptor): unknown {
    const obj: any = {};
    if (message.name !== "") {
      obj.name = message.name;
    }
    if (message.description !== "") {
      obj.description = message.description;
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(base?: I): InterfaceDescriptor {
    return InterfaceDescriptor.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<InterfaceDescriptor>, I>>(object: I): InterfaceDescriptor {
    const message = createBaseInterfaceDescriptor();
    message.name = object.name ?? "";
    message.description = object.description ?? "";
    return message;
  },
};
/** Returns a ScalarDescriptor with every field set to its protobuf default value. */
function createBaseScalarDescriptor(): ScalarDescriptor {
  return { name: "", description: "", fieldType: [] };
}

/**
 * Binary (protobuf wire format) and JSON codec for ScalarDescriptor.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing.
 */
export const ScalarDescriptor: MessageFns<ScalarDescriptor> = {
  encode(message: ScalarDescriptor, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.name !== "") {
      writer.uint32(10).string(message.name);
    }
    if (message.description !== "") {
      writer.uint32(18).string(message.description);
    }
    // field_type is written packed (length-delimited, tag 26).
    writer.uint32(26).fork();
    for (const v of message.fieldType) {
      writer.int32(v);
    }
    writer.join();
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): ScalarDescriptor {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseScalarDescriptor();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 3: {
          // Accept both unpacked (tag 24) and packed (tag 26) encodings, as
          // required for repeated scalar fields.
          if (tag === 24) {
            message.fieldType.push(reader.int32() as any);
            continue;
          }
          if (tag === 26) {
            const end2 = reader.uint32() + reader.pos;
            while (reader.pos < end2) {
              message.fieldType.push(reader.int32() as any);
            }
            continue;
          }
          break;
        }
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.name = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.description = reader.string();
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): ScalarDescriptor {
    return {
      name: isSet(object.name) ? globalThis.String(object.name) : "",
      description: isSet(object.description) ? globalThis.String(object.description) : "",
      fieldType: globalThis.Array.isArray(object?.fieldType)
        ? object.fieldType.map((e: any) => scalarTypeFromJSON(e))
        : [],
    };
  },
  toJSON(message: ScalarDescriptor): unknown {
    const obj: any = {};
    if (message.name !== "") {
      obj.name = message.name;
    }
    if (message.description !== "") {
      obj.description = message.description;
    }
    if (message.fieldType?.length) {
      obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e));
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(base?: I): ScalarDescriptor {
    return ScalarDescriptor.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<ScalarDescriptor>, I>>(object: I): ScalarDescriptor {
    const message = createBaseScalarDescriptor();
    message.name = object.name ?? "";
    message.description = object.description ?? "";
    message.fieldType = object.fieldType?.map((e) => e) || [];
    return message;
  },
};
// Primitive/leaf types that DeepPartial leaves untouched.
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;

// Recursive partial type: every property optional, applied through arrays;
// Long-typed values may also be supplied as string | number.
export type DeepPartial<T> = T extends Builtin ? T
  : T extends Long ? string | number | Long : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
  : Partial<T>;

type KeysOfUnion<T> = T extends T ? keyof T : never;
// Exact<P, I> rejects excess properties: keys of I absent from P are typed never.
export type Exact<P, I extends P> = P extends Builtin ? P
  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };

// True when a JSON value is present (neither null nor undefined).
function isSet(value: any): boolean {
  return value !== null && value !== undefined;
}

// Common shape of the generated per-message codec objects in this file.
export interface MessageFns<T> {
  encode(message: T, writer?: BinaryWriter): BinaryWriter;
  decode(input: BinaryReader | Uint8Array, length?: number): T;
  fromJSON(object: any): T;
  toJSON(message: T): unknown;
  create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
  fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}

View File

@ -0,0 +1,9 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: gogoproto/gogo.proto
/* eslint-disable */

// NOTE(review): gogoproto/gogo.proto declares only custom options/extensions,
// so ts-proto emits no message types here — just the package name constant.
export const protobufPackage = "gogoproto";

View File

@ -0,0 +1,260 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: google/protobuf/any.proto
/* eslint-disable */
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
import Long from "long";
export const protobufPackage = "google.protobuf";
/**
* `Any` contains an arbitrary serialized protocol buffer message along with a
* URL that describes the type of the serialized message.
*
* Protobuf library provides support to pack/unpack Any values in the form
* of utility functions or additional generated methods of the Any type.
*
* Example 1: Pack and unpack a message in C++.
*
* Foo foo = ...;
* Any any;
* any.PackFrom(foo);
* ...
* if (any.UnpackTo(&foo)) {
* ...
* }
*
* Example 2: Pack and unpack a message in Java.
*
* Foo foo = ...;
* Any any = Any.pack(foo);
* ...
* if (any.is(Foo.class)) {
* foo = any.unpack(Foo.class);
* }
* // or ...
* if (any.isSameTypeAs(Foo.getDefaultInstance())) {
* foo = any.unpack(Foo.getDefaultInstance());
* }
*
* Example 3: Pack and unpack a message in Python.
*
* foo = Foo(...)
* any = Any()
* any.Pack(foo)
* ...
* if any.Is(Foo.DESCRIPTOR):
* any.Unpack(foo)
* ...
*
* Example 4: Pack and unpack a message in Go
*
* foo := &pb.Foo{...}
* any, err := anypb.New(foo)
* if err != nil {
* ...
* }
* ...
* foo := &pb.Foo{}
* if err := any.UnmarshalTo(foo); err != nil {
* ...
* }
*
* The pack methods provided by protobuf library will by default use
* 'type.googleapis.com/full.type.name' as the type URL and the unpack
* methods only use the fully qualified type name after the last '/'
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
* name "y.z".
*
* JSON
* ====
* The JSON representation of an `Any` value uses the regular
* representation of the deserialized, embedded message, with an
* additional field `@type` which contains the type URL. Example:
*
* package google.profile;
* message Person {
* string first_name = 1;
* string last_name = 2;
* }
*
* {
* "@type": "type.googleapis.com/google.profile.Person",
* "firstName": <string>,
* "lastName": <string>
* }
*
* If the embedded message type is well-known and has a custom JSON
* representation, that representation will be embedded adding a field
* `value` which holds the custom JSON in addition to the `@type`
* field. Example (for message [google.protobuf.Duration][]):
*
* {
* "@type": "type.googleapis.com/google.protobuf.Duration",
* "value": "1.212s"
* }
*/
export interface Any {
  /**
   * A URL/resource name that uniquely identifies the type of the serialized
   * protocol buffer message. This string must contain at least
   * one "/" character. The last segment of the URL's path must represent
   * the fully qualified name of the type (as in
   * `path/google.protobuf.Duration`). The name should be in a canonical form
   * (e.g., leading "." is not accepted).
   *
   * In practice, teams usually precompile into the binary all types that they
   * expect it to use in the context of Any. However, for URLs which use the
   * scheme `http`, `https`, or no scheme, one can optionally set up a type
   * server that maps type URLs to message definitions as follows:
   *
   * * If no scheme is provided, `https` is assumed.
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   *
   * Note: this functionality is not currently available in the official
   * protobuf release, and it is not used for type URLs beginning with
   * type.googleapis.com. As of May 2023, there are no widely used type server
   * implementations and no plans to implement one.
   *
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   *
   * NOTE: ts-proto maps the proto field `type_url` to camelCase `typeUrl`.
   */
  typeUrl: string;
  /** Must be a valid serialized protocol buffer of the above specified type. */
  value: Uint8Array;
}

/** Returns an Any with every field set to its protobuf default value. */
function createBaseAny(): Any {
  return { typeUrl: "", value: new Uint8Array(0) };
}

/**
 * Binary (protobuf wire format) and JSON codec for Any.
 * Generated by protoc-gen-ts_proto — regenerate via scripts/protocgen.sh
 * rather than hand-editing. JSON encodes `value` as base64.
 */
export const Any: MessageFns<Any> = {
  encode(message: Any, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.typeUrl !== "") {
      writer.uint32(10).string(message.typeUrl);
    }
    if (message.value.length !== 0) {
      writer.uint32(18).bytes(message.value);
    }
    return writer;
  },
  decode(input: BinaryReader | Uint8Array, length?: number): Any {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseAny();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.typeUrl = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.value = reader.bytes();
          continue;
        }
      }
      // Stop on end-group (wire type 4) or a zero tag; otherwise skip unknown fields.
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  fromJSON(object: any): Any {
    return {
      typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
      value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(0),
    };
  },
  toJSON(message: Any): unknown {
    const obj: any = {};
    if (message.typeUrl !== "") {
      obj.typeUrl = message.typeUrl;
    }
    if (message.value.length !== 0) {
      obj.value = base64FromBytes(message.value);
    }
    return obj;
  },
  create<I extends Exact<DeepPartial<Any>, I>>(base?: I): Any {
    return Any.fromPartial(base ?? ({} as any));
  },
  fromPartial<I extends Exact<DeepPartial<Any>, I>>(object: I): Any {
    const message = createBaseAny();
    message.typeUrl = object.typeUrl ?? "";
    message.value = object.value ?? new Uint8Array(0);
    return message;
  },
};
/**
 * Decodes a base64 string into raw bytes. Uses Node's Buffer when it is
 * available, otherwise falls back to the browser's atob.
 */
function bytesFromBase64(b64: string): Uint8Array {
  if (!(globalThis as any).Buffer) {
    const bin = globalThis.atob(b64);
    return Uint8Array.from(bin, (ch) => ch.charCodeAt(0));
  }
  return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
/**
 * Encodes raw bytes as a base64 string. Uses Node's Buffer when it is
 * available, otherwise falls back to the browser's btoa.
 */
function base64FromBytes(arr: Uint8Array): string {
  if (!(globalThis as any).Buffer) {
    let bin = "";
    for (const byte of arr) {
      bin += globalThis.String.fromCharCode(byte);
    }
    return globalThis.btoa(bin);
  }
  return globalThis.Buffer.from(arr).toString("base64");
}
// Types that DeepPartial leaves as-is rather than recursing into.
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
// Recursively makes all properties optional; Long values may also be given as
// string/number, and (readonly) arrays are mapped element-wise.
export type DeepPartial<T> = T extends Builtin ? T
  : T extends Long ? string | number | Long : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
  : Partial<T>;
// Distributes keyof over the members of a union type.
type KeysOfUnion<T> = T extends T ? keyof T : never;
// Rejects object literals that carry properties P does not declare
// (excess-property checking for create/fromPartial inputs).
export type Exact<P, I extends P> = P extends Builtin ? P
  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
// True when the value is neither null nor undefined.
function isSet(value: any): boolean {
  return value !== null && value !== undefined;
}
// Interface implemented by every generated message codec in this file.
export interface MessageFns<T> {
  encode(message: T, writer?: BinaryWriter): BinaryWriter;
  decode(input: BinaryReader | Uint8Array, length?: number): T;
  fromJSON(object: any): T;
  toJSON(message: T): unknown;
  create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
  fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,239 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: laconic/types/v1/account.proto
/* eslint-disable */
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
import Long from "long";
import { BaseAccount } from "../../../cosmos/auth/v1beta1/auth";
// Fully qualified proto package these types were generated from.
export const protobufPackage = "laconic.types.v1";
/** ZenithAccount defines an account storing Azimuth identity */
export interface ZenithAccount {
  // Embedded standard cosmos-sdk base account (address, pubkey, number, sequence).
  baseAccount?: BaseAccount | undefined;
  // Azimuth identity bound to this account.
  azimuthId: string;
  // Galaxy/star points owned by the account; unset when none are recorded.
  ownedPoints?: AccountOwnedPoints | undefined;
}
/**
 * AccountOwnedPoints defines the set of owned points (galaxy and star) for a
 * zenith account
 */
export interface AccountOwnedPoints {
  // Owned galaxy point (int64 on the wire).
  galaxy: Long;
  // Owned star points (packed repeated int64 on the wire).
  stars: Long[];
}
// Returns a ZenithAccount with every field set to its proto3 default value.
function createBaseZenithAccount(): ZenithAccount {
  return { baseAccount: undefined, azimuthId: "", ownedPoints: undefined };
}
// Wire/JSON codec for laconic.types.v1.ZenithAccount (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const ZenithAccount: MessageFns<ZenithAccount> = {
  // Writes non-default fields: 1=baseAccount (msg), 2=azimuthId (string),
  // 3=ownedPoints (msg).
  encode(message: ZenithAccount, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.baseAccount !== undefined) {
      BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).join();
    }
    if (message.azimuthId !== "") {
      writer.uint32(18).string(message.azimuthId);
    }
    if (message.ownedPoints !== undefined) {
      AccountOwnedPoints.encode(message.ownedPoints, writer.uint32(26).fork()).join();
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): ZenithAccount {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseZenithAccount();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.baseAccount = BaseAccount.decode(reader, reader.uint32());
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.azimuthId = reader.string();
          continue;
        }
        case 3: {
          if (tag !== 26) {
            break;
          }
          message.ownedPoints = AccountOwnedPoints.decode(reader, reader.uint32());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): ZenithAccount {
    return {
      baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined,
      azimuthId: isSet(object.azimuthId) ? globalThis.String(object.azimuthId) : "",
      ownedPoints: isSet(object.ownedPoints) ? AccountOwnedPoints.fromJSON(object.ownedPoints) : undefined,
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: ZenithAccount): unknown {
    const obj: any = {};
    if (message.baseAccount !== undefined) {
      obj.baseAccount = BaseAccount.toJSON(message.baseAccount);
    }
    if (message.azimuthId !== "") {
      obj.azimuthId = message.azimuthId;
    }
    if (message.ownedPoints !== undefined) {
      obj.ownedPoints = AccountOwnedPoints.toJSON(message.ownedPoints);
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<ZenithAccount>, I>>(base?: I): ZenithAccount {
    return ZenithAccount.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<ZenithAccount>, I>>(object: I): ZenithAccount {
    const message = createBaseZenithAccount();
    message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null)
      ? BaseAccount.fromPartial(object.baseAccount)
      : undefined;
    message.azimuthId = object.azimuthId ?? "";
    message.ownedPoints = (object.ownedPoints !== undefined && object.ownedPoints !== null)
      ? AccountOwnedPoints.fromPartial(object.ownedPoints)
      : undefined;
    return message;
  },
};
// Returns an AccountOwnedPoints with every field set to its proto3 default.
function createBaseAccountOwnedPoints(): AccountOwnedPoints {
  return { galaxy: Long.ZERO, stars: [] };
}
// Wire/JSON codec for laconic.types.v1.AccountOwnedPoints (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const AccountOwnedPoints: MessageFns<AccountOwnedPoints> = {
  // Writes field 1 (galaxy, varint int64) when non-zero and field 2 (stars)
  // as a packed, length-delimited run of int64 varints.
  encode(message: AccountOwnedPoints, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (!message.galaxy.equals(Long.ZERO)) {
      writer.uint32(8).int64(message.galaxy.toString());
    }
    // NOTE(review): the packed `stars` field is written unconditionally, so an
    // empty list still emits a tag plus zero length; decoders accept this, but
    // it is not byte-identical to encoders that omit empty packed fields —
    // confirm this matches the chain's expected encoding before relying on
    // deterministic bytes (e.g. for signing).
    writer.uint32(18).fork();
    for (const v of message.stars) {
      writer.int64(v.toString());
    }
    writer.join();
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // `stars` accepts both packed (tag 18) and unpacked (tag 16) encodings.
  decode(input: BinaryReader | Uint8Array, length?: number): AccountOwnedPoints {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseAccountOwnedPoints();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 8) {
            break;
          }
          message.galaxy = Long.fromString(reader.int64().toString());
          continue;
        }
        case 2: {
          if (tag === 16) {
            message.stars.push(Long.fromString(reader.int64().toString()));
            continue;
          }
          if (tag === 18) {
            const end2 = reader.uint32() + reader.pos;
            while (reader.pos < end2) {
              message.stars.push(Long.fromString(reader.int64().toString()));
            }
            continue;
          }
          break;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): AccountOwnedPoints {
    return {
      galaxy: isSet(object.galaxy) ? Long.fromValue(object.galaxy) : Long.ZERO,
      stars: globalThis.Array.isArray(object?.stars) ? object.stars.map((e: any) => Long.fromValue(e)) : [],
    };
  },
  // Emits a plain JSON object; int64 values are rendered as decimal strings.
  toJSON(message: AccountOwnedPoints): unknown {
    const obj: any = {};
    if (!message.galaxy.equals(Long.ZERO)) {
      obj.galaxy = (message.galaxy || Long.ZERO).toString();
    }
    if (message.stars?.length) {
      obj.stars = message.stars.map((e) => (e || Long.ZERO).toString());
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<AccountOwnedPoints>, I>>(base?: I): AccountOwnedPoints {
    return AccountOwnedPoints.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<AccountOwnedPoints>, I>>(object: I): AccountOwnedPoints {
    const message = createBaseAccountOwnedPoints();
    message.galaxy = (object.galaxy !== undefined && object.galaxy !== null)
      ? Long.fromValue(object.galaxy)
      : Long.ZERO;
    message.stars = object.stars?.map((e) => Long.fromValue(e)) || [];
    return message;
  },
};
// Types that DeepPartial leaves as-is rather than recursing into.
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
// Recursively makes all properties optional; Long values may also be given as
// string/number, and (readonly) arrays are mapped element-wise.
export type DeepPartial<T> = T extends Builtin ? T
  : T extends Long ? string | number | Long : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
  : Partial<T>;
// Distributes keyof over the members of a union type.
type KeysOfUnion<T> = T extends T ? keyof T : never;
// Rejects object literals that carry properties P does not declare
// (excess-property checking for create/fromPartial inputs).
export type Exact<P, I extends P> = P extends Builtin ? P
  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
// True when the value is neither null nor undefined.
function isSet(value: any): boolean {
  return value !== null && value !== undefined;
}
// Interface implemented by every generated message codec in this file.
export interface MessageFns<T> {
  encode(message: T, writer?: BinaryWriter): BinaryWriter;
  decode(input: BinaryReader | Uint8Array, length?: number): T;
  fromJSON(object: any): T;
  toJSON(message: T): unknown;
  create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
  fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}

View File

@ -0,0 +1,766 @@
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
// versions:
// protoc-gen-ts_proto v2.7.1
// protoc v3.12.4
// source: laconic/zenith/v1/zenith.proto
/* eslint-disable */
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
import Long from "long";
// Fully qualified proto package these types were generated from.
export const protobufPackage = "laconic.zenith.v1";
/** OwnerChange defines an owner changed event */
export interface OwnerChange {
  // Azimuth point whose owner changed (uint32 on the wire).
  point: number;
  // New owner of the point.
  owner: string;
}
/**
 * List of owner change events
 * Value type to be used in OwnerChangesCache
 */
export interface OwnerChangesList {
  value: OwnerChange[];
}
/**
 * StalledEthHeightSlashingInfo consists of information required to slash
 * validators progressively when eth height updating is stalled
 */
export interface StalledEthHeightSlashingInfo {
  // Block height at which the stall was first observed (int64 on the wire).
  startHeight: Long;
  // Validator address/identifier -> voting power.
  validatorPowerMap: { [key: string]: Long };
}
// Generated map-entry helper type for StalledEthHeightSlashingInfo.validatorPowerMap.
export interface StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
  key: string;
  value: Long;
}
/** Attestation represents the attestation payload signed by validators in gentx */
export interface Attestation {
  payload?: Payload | undefined;
  signatures: string[];
}
/**
 * Payload is the outer payload having Azimuth ID (address); signed with zenithd
 * account
 */
export interface Payload {
  address: string;
  msg: string;
  payload?: InnerPayload | undefined;
}
/**
 * InnerPayload is the inner payload having zenith address; signed with the
 * Ethereum account
 */
export interface InnerPayload {
  address: string;
  msg: string;
  ownedPoints?: OwnedPoints | undefined;
}
/** OwnedPoints is the set of `@p` names of owned galaxy and star */
export interface OwnedPoints {
  galaxy: string;
  stars: string[];
}
// Returns an OwnerChange with every field set to its proto3 default value.
function createBaseOwnerChange(): OwnerChange {
  return { point: 0, owner: "" };
}
// Wire/JSON codec for laconic.zenith.v1.OwnerChange (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const OwnerChange: MessageFns<OwnerChange> = {
  // Writes non-default fields: 1=point (uint32 varint), 2=owner (string).
  encode(message: OwnerChange, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.point !== 0) {
      writer.uint32(8).uint32(message.point);
    }
    if (message.owner !== "") {
      writer.uint32(18).string(message.owner);
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): OwnerChange {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseOwnerChange();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 8) {
            break;
          }
          message.point = reader.uint32();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.owner = reader.string();
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): OwnerChange {
    return {
      point: isSet(object.point) ? globalThis.Number(object.point) : 0,
      owner: isSet(object.owner) ? globalThis.String(object.owner) : "",
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: OwnerChange): unknown {
    const obj: any = {};
    if (message.point !== 0) {
      obj.point = Math.round(message.point);
    }
    if (message.owner !== "") {
      obj.owner = message.owner;
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<OwnerChange>, I>>(base?: I): OwnerChange {
    return OwnerChange.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<OwnerChange>, I>>(object: I): OwnerChange {
    const message = createBaseOwnerChange();
    message.point = object.point ?? 0;
    message.owner = object.owner ?? "";
    return message;
  },
};
// Returns an OwnerChangesList with every field set to its proto3 default.
function createBaseOwnerChangesList(): OwnerChangesList {
  return { value: [] };
}
// Wire/JSON codec for laconic.zenith.v1.OwnerChangesList (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const OwnerChangesList: MessageFns<OwnerChangesList> = {
  // Writes each element of `value` as a length-delimited field 1 submessage.
  encode(message: OwnerChangesList, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    for (const v of message.value) {
      OwnerChange.encode(v!, writer.uint32(10).fork()).join();
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): OwnerChangesList {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseOwnerChangesList();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.value.push(OwnerChange.decode(reader, reader.uint32()));
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): OwnerChangesList {
    return {
      value: globalThis.Array.isArray(object?.value) ? object.value.map((e: any) => OwnerChange.fromJSON(e)) : [],
    };
  },
  // Emits a plain JSON object, omitting the list when empty.
  toJSON(message: OwnerChangesList): unknown {
    const obj: any = {};
    if (message.value?.length) {
      obj.value = message.value.map((e) => OwnerChange.toJSON(e));
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<OwnerChangesList>, I>>(base?: I): OwnerChangesList {
    return OwnerChangesList.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<OwnerChangesList>, I>>(object: I): OwnerChangesList {
    const message = createBaseOwnerChangesList();
    message.value = object.value?.map((e) => OwnerChange.fromPartial(e)) || [];
    return message;
  },
};
// Returns a StalledEthHeightSlashingInfo with every field set to its default.
function createBaseStalledEthHeightSlashingInfo(): StalledEthHeightSlashingInfo {
  return { startHeight: Long.ZERO, validatorPowerMap: {} };
}
// Wire/JSON codec for laconic.zenith.v1.StalledEthHeightSlashingInfo
// (generated by protoc-gen-ts_proto — regenerate rather than hand-editing).
export const StalledEthHeightSlashingInfo: MessageFns<StalledEthHeightSlashingInfo> = {
  // Writes field 1 (startHeight, int64 varint) when non-zero and each map
  // entry as a length-delimited field 2 submessage.
  encode(message: StalledEthHeightSlashingInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (!message.startHeight.equals(Long.ZERO)) {
      writer.uint32(8).int64(message.startHeight.toString());
    }
    Object.entries(message.validatorPowerMap).forEach(([key, value]) => {
      StalledEthHeightSlashingInfo_ValidatorPowerMapEntry.encode({ key: key as any, value }, writer.uint32(18).fork())
        .join();
    });
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // map entries are decoded via the generated entry submessage.
  decode(input: BinaryReader | Uint8Array, length?: number): StalledEthHeightSlashingInfo {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseStalledEthHeightSlashingInfo();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 8) {
            break;
          }
          message.startHeight = Long.fromString(reader.int64().toString());
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          const entry2 = StalledEthHeightSlashingInfo_ValidatorPowerMapEntry.decode(reader, reader.uint32());
          if (entry2.value !== undefined) {
            message.validatorPowerMap[entry2.key] = entry2.value;
          }
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): StalledEthHeightSlashingInfo {
    return {
      startHeight: isSet(object.startHeight) ? Long.fromValue(object.startHeight) : Long.ZERO,
      validatorPowerMap: isObject(object.validatorPowerMap)
        ? Object.entries(object.validatorPowerMap).reduce<{ [key: string]: Long }>((acc, [key, value]) => {
          acc[key] = Long.fromValue(value as Long | string);
          return acc;
        }, {})
        : {},
    };
  },
  // Emits a plain JSON object; int64 values are rendered as decimal strings.
  toJSON(message: StalledEthHeightSlashingInfo): unknown {
    const obj: any = {};
    if (!message.startHeight.equals(Long.ZERO)) {
      obj.startHeight = (message.startHeight || Long.ZERO).toString();
    }
    if (message.validatorPowerMap) {
      const entries = Object.entries(message.validatorPowerMap);
      if (entries.length > 0) {
        obj.validatorPowerMap = {};
        entries.forEach(([k, v]) => {
          obj.validatorPowerMap[k] = v.toString();
        });
      }
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<StalledEthHeightSlashingInfo>, I>>(base?: I): StalledEthHeightSlashingInfo {
    return StalledEthHeightSlashingInfo.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<StalledEthHeightSlashingInfo>, I>>(object: I): StalledEthHeightSlashingInfo {
    const message = createBaseStalledEthHeightSlashingInfo();
    message.startHeight = (object.startHeight !== undefined && object.startHeight !== null)
      ? Long.fromValue(object.startHeight)
      : Long.ZERO;
    message.validatorPowerMap = Object.entries(object.validatorPowerMap ?? {}).reduce<{ [key: string]: Long }>(
      (acc, [key, value]) => {
        if (value !== undefined) {
          acc[key] = Long.fromValue(value);
        }
        return acc;
      },
      {},
    );
    return message;
  },
};
// Returns a map entry with every field set to its proto3 default value.
function createBaseStalledEthHeightSlashingInfo_ValidatorPowerMapEntry(): StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
  return { key: "", value: Long.ZERO };
}
// Wire/JSON codec for the generated validatorPowerMap entry submessage
// (generated by protoc-gen-ts_proto — regenerate rather than hand-editing).
export const StalledEthHeightSlashingInfo_ValidatorPowerMapEntry: MessageFns<
  StalledEthHeightSlashingInfo_ValidatorPowerMapEntry
> = {
  // Writes non-default fields: 1=key (string), 2=value (int64 varint).
  encode(
    message: StalledEthHeightSlashingInfo_ValidatorPowerMapEntry,
    writer: BinaryWriter = new BinaryWriter(),
  ): BinaryWriter {
    if (message.key !== "") {
      writer.uint32(10).string(message.key);
    }
    if (!message.value.equals(Long.ZERO)) {
      writer.uint32(16).int64(message.value.toString());
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseStalledEthHeightSlashingInfo_ValidatorPowerMapEntry();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.key = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 16) {
            break;
          }
          message.value = Long.fromString(reader.int64().toString());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
    return {
      key: isSet(object.key) ? globalThis.String(object.key) : "",
      value: isSet(object.value) ? Long.fromValue(object.value) : Long.ZERO,
    };
  },
  // Emits a plain JSON object; the int64 value is a decimal string.
  toJSON(message: StalledEthHeightSlashingInfo_ValidatorPowerMapEntry): unknown {
    const obj: any = {};
    if (message.key !== "") {
      obj.key = message.key;
    }
    if (!message.value.equals(Long.ZERO)) {
      obj.value = (message.value || Long.ZERO).toString();
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<StalledEthHeightSlashingInfo_ValidatorPowerMapEntry>, I>>(
    base?: I,
  ): StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
    return StalledEthHeightSlashingInfo_ValidatorPowerMapEntry.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<StalledEthHeightSlashingInfo_ValidatorPowerMapEntry>, I>>(
    object: I,
  ): StalledEthHeightSlashingInfo_ValidatorPowerMapEntry {
    const message = createBaseStalledEthHeightSlashingInfo_ValidatorPowerMapEntry();
    message.key = object.key ?? "";
    message.value = (object.value !== undefined && object.value !== null) ? Long.fromValue(object.value) : Long.ZERO;
    return message;
  },
};
// Returns an Attestation with every field set to its proto3 default value.
function createBaseAttestation(): Attestation {
  return { payload: undefined, signatures: [] };
}
// Wire/JSON codec for laconic.zenith.v1.Attestation (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const Attestation: MessageFns<Attestation> = {
  // Writes field 1 (payload submessage) when set and each signature as a
  // repeated field 2 string.
  encode(message: Attestation, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.payload !== undefined) {
      Payload.encode(message.payload, writer.uint32(10).fork()).join();
    }
    for (const v of message.signatures) {
      writer.uint32(18).string(v!);
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): Attestation {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseAttestation();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.payload = Payload.decode(reader, reader.uint32());
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.signatures.push(reader.string());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): Attestation {
    return {
      payload: isSet(object.payload) ? Payload.fromJSON(object.payload) : undefined,
      signatures: globalThis.Array.isArray(object?.signatures)
        ? object.signatures.map((e: any) => globalThis.String(e))
        : [],
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: Attestation): unknown {
    const obj: any = {};
    if (message.payload !== undefined) {
      obj.payload = Payload.toJSON(message.payload);
    }
    if (message.signatures?.length) {
      obj.signatures = message.signatures;
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<Attestation>, I>>(base?: I): Attestation {
    return Attestation.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<Attestation>, I>>(object: I): Attestation {
    const message = createBaseAttestation();
    message.payload = (object.payload !== undefined && object.payload !== null)
      ? Payload.fromPartial(object.payload)
      : undefined;
    message.signatures = object.signatures?.map((e) => e) || [];
    return message;
  },
};
// Returns a Payload with every field set to its proto3 default value.
function createBasePayload(): Payload {
  return { address: "", msg: "", payload: undefined };
}
// Wire/JSON codec for laconic.zenith.v1.Payload (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const Payload: MessageFns<Payload> = {
  // Writes non-default fields: 1=address (string), 2=msg (string),
  // 3=payload (InnerPayload submessage).
  encode(message: Payload, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.address !== "") {
      writer.uint32(10).string(message.address);
    }
    if (message.msg !== "") {
      writer.uint32(18).string(message.msg);
    }
    if (message.payload !== undefined) {
      InnerPayload.encode(message.payload, writer.uint32(26).fork()).join();
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): Payload {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBasePayload();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.address = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.msg = reader.string();
          continue;
        }
        case 3: {
          if (tag !== 26) {
            break;
          }
          message.payload = InnerPayload.decode(reader, reader.uint32());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): Payload {
    return {
      address: isSet(object.address) ? globalThis.String(object.address) : "",
      msg: isSet(object.msg) ? globalThis.String(object.msg) : "",
      payload: isSet(object.payload) ? InnerPayload.fromJSON(object.payload) : undefined,
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: Payload): unknown {
    const obj: any = {};
    if (message.address !== "") {
      obj.address = message.address;
    }
    if (message.msg !== "") {
      obj.msg = message.msg;
    }
    if (message.payload !== undefined) {
      obj.payload = InnerPayload.toJSON(message.payload);
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<Payload>, I>>(base?: I): Payload {
    return Payload.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<Payload>, I>>(object: I): Payload {
    const message = createBasePayload();
    message.address = object.address ?? "";
    message.msg = object.msg ?? "";
    message.payload = (object.payload !== undefined && object.payload !== null)
      ? InnerPayload.fromPartial(object.payload)
      : undefined;
    return message;
  },
};
// Returns an InnerPayload with every field set to its proto3 default value.
function createBaseInnerPayload(): InnerPayload {
  return { address: "", msg: "", ownedPoints: undefined };
}
// Wire/JSON codec for laconic.zenith.v1.InnerPayload (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const InnerPayload: MessageFns<InnerPayload> = {
  // Writes non-default fields: 1=address (string), 2=msg (string),
  // 3=ownedPoints (OwnedPoints submessage).
  encode(message: InnerPayload, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.address !== "") {
      writer.uint32(10).string(message.address);
    }
    if (message.msg !== "") {
      writer.uint32(18).string(message.msg);
    }
    if (message.ownedPoints !== undefined) {
      OwnedPoints.encode(message.ownedPoints, writer.uint32(26).fork()).join();
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): InnerPayload {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseInnerPayload();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.address = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.msg = reader.string();
          continue;
        }
        case 3: {
          if (tag !== 26) {
            break;
          }
          message.ownedPoints = OwnedPoints.decode(reader, reader.uint32());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): InnerPayload {
    return {
      address: isSet(object.address) ? globalThis.String(object.address) : "",
      msg: isSet(object.msg) ? globalThis.String(object.msg) : "",
      ownedPoints: isSet(object.ownedPoints) ? OwnedPoints.fromJSON(object.ownedPoints) : undefined,
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: InnerPayload): unknown {
    const obj: any = {};
    if (message.address !== "") {
      obj.address = message.address;
    }
    if (message.msg !== "") {
      obj.msg = message.msg;
    }
    if (message.ownedPoints !== undefined) {
      obj.ownedPoints = OwnedPoints.toJSON(message.ownedPoints);
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<InnerPayload>, I>>(base?: I): InnerPayload {
    return InnerPayload.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<InnerPayload>, I>>(object: I): InnerPayload {
    const message = createBaseInnerPayload();
    message.address = object.address ?? "";
    message.msg = object.msg ?? "";
    message.ownedPoints = (object.ownedPoints !== undefined && object.ownedPoints !== null)
      ? OwnedPoints.fromPartial(object.ownedPoints)
      : undefined;
    return message;
  },
};
// Returns an OwnedPoints with every field set to its proto3 default value.
function createBaseOwnedPoints(): OwnedPoints {
  return { galaxy: "", stars: [] };
}
// Wire/JSON codec for laconic.zenith.v1.OwnedPoints (generated by
// protoc-gen-ts_proto — regenerate from the .proto rather than hand-editing).
export const OwnedPoints: MessageFns<OwnedPoints> = {
  // Writes field 1 (galaxy string) when non-empty and each star name as a
  // repeated field 2 string.
  encode(message: OwnedPoints, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
    if (message.galaxy !== "") {
      writer.uint32(10).string(message.galaxy);
    }
    for (const v of message.stars) {
      writer.uint32(18).string(v!);
    }
    return writer;
  },
  // Reads fields until `length` bytes (or the whole buffer) are consumed;
  // unknown fields are skipped.
  decode(input: BinaryReader | Uint8Array, length?: number): OwnedPoints {
    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBaseOwnedPoints();
    while (reader.pos < end) {
      const tag = reader.uint32();
      switch (tag >>> 3) {
        case 1: {
          if (tag !== 10) {
            break;
          }
          message.galaxy = reader.string();
          continue;
        }
        case 2: {
          if (tag !== 18) {
            break;
          }
          message.stars.push(reader.string());
          continue;
        }
      }
      if ((tag & 7) === 4 || tag === 0) {
        break;
      }
      reader.skip(tag & 7);
    }
    return message;
  },
  // Builds a message from a plain JSON object; missing fields get defaults.
  fromJSON(object: any): OwnedPoints {
    return {
      galaxy: isSet(object.galaxy) ? globalThis.String(object.galaxy) : "",
      stars: globalThis.Array.isArray(object?.stars) ? object.stars.map((e: any) => globalThis.String(e)) : [],
    };
  },
  // Emits a plain JSON object, omitting default-valued fields.
  toJSON(message: OwnedPoints): unknown {
    const obj: any = {};
    if (message.galaxy !== "") {
      obj.galaxy = message.galaxy;
    }
    if (message.stars?.length) {
      obj.stars = message.stars;
    }
    return obj;
  },
  // Creates a message from a (possibly partial) object literal.
  create<I extends Exact<DeepPartial<OwnedPoints>, I>>(base?: I): OwnedPoints {
    return OwnedPoints.fromPartial(base ?? ({} as any));
  },
  // Fills proto3 defaults for any fields missing from the partial object.
  fromPartial<I extends Exact<DeepPartial<OwnedPoints>, I>>(object: I): OwnedPoints {
    const message = createBaseOwnedPoints();
    message.galaxy = object.galaxy ?? "";
    message.stars = object.stars?.map((e) => e) || [];
    return message;
  },
};
// Types that DeepPartial leaves as-is rather than recursing into.
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
// Recursively makes all properties optional; Long values may also be given as
// string/number, and (readonly) arrays are mapped element-wise.
export type DeepPartial<T> = T extends Builtin ? T
  : T extends Long ? string | number | Long : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
  : Partial<T>;
// Distributes keyof over the members of a union type.
type KeysOfUnion<T> = T extends T ? keyof T : never;
// Rejects object literals that carry properties P does not declare
// (excess-property checking for create/fromPartial inputs).
export type Exact<P, I extends P> = P extends Builtin ? P
  : P & { [K in keyof P]: Exact<P[K], I[K]> } & { [K in Exclude<keyof I, KeysOfUnion<P>>]: never };
// True when the value is a non-null object (used for JSON map fields).
function isObject(value: any): boolean {
  return typeof value === "object" && value !== null;
}
// True when the value is neither null nor undefined.
function isSet(value: any): boolean {
  return value !== null && value !== undefined;
}
// Interface implemented by every generated message codec in this file.
export interface MessageFns<T> {
  encode(message: T, writer?: BinaryWriter): BinaryWriter;
  decode(input: BinaryReader | Uint8Array, length?: number): T;
  fromJSON(object: any): T;
  toJSON(message: T): unknown;
  create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
  fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}

View File

@ -7,7 +7,6 @@ services:
environment:
DGRAPH_DOMAIN: ${DGRAPH_DOMAIN:-http://alpha:8080}
NEXT_PUBLIC_MULTICHAIN: ${NEXT_PUBLIC_MULTICHAIN}
NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS: ${NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS}
NEXT_PUBLIC_REGISTRY_NAME: ${NEXT_PUBLIC_REGISTRY_NAME}
NEXT_PUBLIC_LOGO: ${NEXT_PUBLIC_LOGO}
NEXT_PUBLIC_CHAIN_ID: ${NEXT_PUBLIC_CHAIN_ID}

View File

@ -8,7 +8,6 @@ fi
# Export environment variables
export DGRAPH_URL="${DGRAPH_DOMAIN}/graphql"
export NEXT_PUBLIC_MULTICHAIN="${NEXT_PUBLIC_MULTICHAIN}"
export NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS="${NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS}"
export NEXT_PUBLIC_REGISTRY_NAME="${NEXT_PUBLIC_REGISTRY_NAME}"
export NEXT_PUBLIC_LOGO="${NEXT_PUBLIC_LOGO}"
export NEXT_PUBLIC_CHAIN_ID="${NEXT_PUBLIC_CHAIN_ID}"
@ -26,7 +25,6 @@ echo "Using the following env variables:"
echo "DGRAPH_DOMAIN: ${DGRAPH_DOMAIN}"
echo "DGRAPH_URL: ${DGRAPH_URL}"
echo "NEXT_PUBLIC_MULTICHAIN: ${NEXT_PUBLIC_MULTICHAIN}"
echo "NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS: ${NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS}"
echo "NEXT_PUBLIC_REGISTRY_NAME: ${NEXT_PUBLIC_REGISTRY_NAME}"
echo "NEXT_PUBLIC_LOGO: ${NEXT_PUBLIC_LOGO}"
echo "NEXT_PUBLIC_CHAIN_ID: ${NEXT_PUBLIC_CHAIN_ID}"

View File

@ -52,9 +52,6 @@ Instructions for running the `cosmos-multisig-ui` using [laconic-so](https://git
# Allow multiple networks/chains in app
NEXT_PUBLIC_MULTICHAIN=
# List of public chains to show in app / or leave empty to show all chains
NEXT_PUBLIC_REGISTRY_ENABLED_CHAINS=
# Name of the chain registry
NEXT_PUBLIC_REGISTRY_NAME=

71
utils/cosmos-client.ts.ts Normal file
View File

@ -0,0 +1,71 @@
import { Uint64 } from "@cosmjs/math";
import { decodeOptionalPubkey } from "@cosmjs/proto-signing";
import { Account, StargateClient } from "@cosmjs/stargate";
import { Comet38Client } from "@cosmjs/tendermint-rpc";
import { assert } from "@cosmjs/utils";
import { Any } from "cosmjs-types/google/protobuf/any";
import Long from "long";

import { BaseAccount, ModuleAccount } from "../src/proto/cosmos/auth/v1beta1/auth";
import { ZenithAccount } from "../src/proto/laconic/types/v1/account";
// Converts a protobuf 64-bit integer (Long) into a cosmjs Uint64 via its
// decimal string form, which is safe beyond Number.MAX_SAFE_INTEGER.
function uint64FromProto(input: Long): Uint64 {
  return Uint64.fromString(input.toString());
}
/**
 * Maps a decoded cosmos-sdk BaseAccount onto the cosmjs Account shape,
 * converting the 64-bit account number and sequence to JS numbers.
 */
function accountFromBaseAccount(input: BaseAccount): Account {
  return {
    address: input.address,
    pubkey: decodeOptionalPubkey(input.pubKey),
    accountNumber: uint64FromProto(input.accountNumber).toNumber(),
    sequence: uint64FromProto(input.sequence).toNumber(),
  };
}
// Reference: https://github.com/cosmos/cosmjs/blob/v0.33.1/packages/stargate/src/accounts.ts#L48
/**
 * Unpacks a google.protobuf.Any account into a cosmjs Account. Handles the
 * standard cosmos-sdk base/module accounts plus the custom laconic
 * ZenithAccount; any other type URL is rejected.
 */
export function accountFromAny(input: Any): Account {
  const { typeUrl, value } = input;
  if (typeUrl === "/cosmos.auth.v1beta1.BaseAccount") {
    return accountFromBaseAccount(BaseAccount.decode(value));
  }
  if (typeUrl === "/cosmos.auth.v1beta1.ModuleAccount") {
    const baseAccount = ModuleAccount.decode(value).baseAccount;
    assert(baseAccount);
    return accountFromBaseAccount(baseAccount);
  }
  if (typeUrl === "/laconic.types.v1.ZenithAccount") {
    const baseAccount = ZenithAccount.decode(value).baseAccount;
    assert(baseAccount);
    return accountFromBaseAccount(baseAccount);
  }
  throw new Error(`Unsupported type: '${typeUrl}'`);
}
/**
 * Stargate client that can decode the custom laconic ZenithAccount type when
 * querying accounts.
 * NOTE(review): despite the name this extends the read-only StargateClient,
 * not SigningStargateClient — confirm whether signing support is intended.
 */
export class SigningZenithClient extends StargateClient {
  /** Connects to a CometBFT RPC endpoint and returns a ready client. */
  static async connect(endpoint: string, options = {}) {
    const cometClient = await Comet38Client.connect(endpoint);
    return new SigningZenithClient(cometClient, options);
  }

  /**
   * Looks up an account by address.
   *
   * Returns null when the query yields no account, and a zeroed account
   * (accountNumber/sequence 0, no pubkey) when the node reports NotFound,
   * so callers can treat never-funded addresses as fresh accounts.
   */
  public async getAccount(searchAddress: string): Promise<Account | null> {
    try {
      const account = await this.forceGetQueryClient().auth.account(searchAddress);
      return account ? accountFromAny(account) : null;
    } catch (error: unknown) {
      // cosmos-sdk nodes surface unknown addresses as a gRPC NotFound error.
      if (/rpc error: code = NotFound/i.test(String(error))) {
        const freshAccount: Account = {
          address: searchAddress,
          pubkey: null,
          accountNumber: 0,
          sequence: 0,
        };
        return freshAccount;
      }
      throw error;
    }
  }
}