Compare commits

..

35 Commits

Author SHA1 Message Date
0xmuralik
bbb6d2a7ab Merge branch 'main' of github.com:cerc-io/laconicd into murali/record-attributes 2023-03-16 10:39:40 +05:30
0xmuralik
656b8dc79b name tag for ref 2023-03-09 16:05:53 +05:30
0xmuralik
f827ea46b8 Revert "revert to '/' for hash ref"
This reverts commit beb1ad2df7.
2023-03-09 10:34:59 +05:30
0xmuralik
5cd07d9104 Revert "fix tests"
This reverts commit b90bb05b3f.
2023-03-09 10:34:47 +05:30
0xmuralik
30cbba44e7 Revert "cli test"
This reverts commit b59eb6878d.
2023-03-09 10:34:31 +05:30
0xmuralik
b59eb6878d cli test 2023-03-02 15:59:59 +05:30
0xmuralik
b90bb05b3f fix tests 2023-03-02 12:14:29 +05:30
0xmuralik
beb1ad2df7 revert to '/' for hash ref 2023-03-02 11:24:15 +05:30
0xmuralik
6025fadc71 Merge branch 'main' of github.com:cerc-io/laconicd into murali/record-attributes 2023-02-27 13:39:04 +05:30
0xmuralik
fa406be65f fix grpc query unit test 2023-01-30 17:34:43 +05:30
0xmuralik
e18541523d remove map and use HashRef obj 2023-01-30 15:46:12 +05:30
0xmuralik
4d5568bf83 gosec 2023-01-20 16:20:46 +05:30
0xmuralik
ff10aac7d7 unit tests 2023-01-20 15:06:13 +05:30
0xmuralik
38c77c10f6 imporve insert attributes logic 2023-01-20 12:14:33 +05:30
0xmuralik
bd04a6307e remove duplicate examples 2023-01-20 10:24:38 +05:30
0xmuralik
6e272334e4 edit variable names 2023-01-20 10:19:48 +05:30
0xmuralik
bf1670be78 payload to record attributes 2023-01-19 15:44:56 +05:30
0xmuralik
67208b08af type in git repo type 2023-01-19 15:31:08 +05:30
0xmuralik
2e986c4d5e fix serviceproviderrecord name 2023-01-19 13:05:56 +05:30
0xmuralik
e38ed67657 remove few types and proto-gen 2023-01-19 12:57:10 +05:30
0xmuralik
145d52f92e mock examples 2023-01-18 15:41:14 +05:30
0xmuralik
4ba723a384 WebsiteRegistrationRecord 2023-01-17 12:03:48 +05:30
0xmuralik
420042b9de hash reference 2023-01-17 11:55:08 +05:30
0xmuralik
d44ab22d87 delete duplicate code 2023-01-11 16:34:20 +05:30
0xmuralik
66cf0f758c proto for remaining record types 2023-01-11 16:23:29 +05:30
0xmuralik
c3774a3b96 merge from main 2023-01-11 15:14:05 +05:30
0xmuralik
39038980cc record names 2023-01-03 14:28:58 +05:30
0xmuralik
26300b0738 versioning record attributes and http post rules for rpc messages 2023-01-02 17:45:52 +05:30
0xmuralik
021ab7df1f patch typed data 2023-01-02 15:44:15 +05:30
0xmuralik
6eeed3de91 hardcode record attributes 2022-12-28 16:13:23 +05:30
0xmuralik
9e91a70f31 patch message types 2022-12-27 12:21:49 +05:30
0xmuralik
961f0f737a patch setRecord in WrapTxToTypedData 2022-12-26 10:59:36 +05:30
0xmuralik
930cca939f gofmpt 2022-12-21 14:56:05 +05:30
0xmuralik
e8fe95114a unpackInterfaces 2022-12-21 13:26:34 +05:30
0xmuralik
82b2c32575 unpack interface 2022-12-19 15:11:52 +05:30
103 changed files with 10106 additions and 8923 deletions

View File

@ -1,7 +1,5 @@
Dockerfile # localnet-setup
localnet-setup
**/node_modules # build
init.sh build
build
localnet-setup

View File

@ -1,17 +0,0 @@
name: Build
on:
pull_request:
branches:
- main
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: 1.21
check-latest: true
- run: |
make build

View File

@ -1,20 +0,0 @@
name: Deploy Contract
on:
pull_request:
branches:
- main
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: 1.21
check-latest: true
- name: Test contract
run: |
make contract-tools
# This seems to be an empty placeholder.
make test-contract

View File

@ -1,21 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
test-importer:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.21
check-latest: true
- uses: actions/checkout@v3
- name: test-importer
run: |
make test-import

View File

@ -1,21 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
test-rpc:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.21
check-latest: true
- uses: actions/checkout@v3
- name: Test rpc endpoint
run: |
make test-rpc

View File

@ -1,40 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
sdk_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Checkout laconic-sdk
uses: actions/checkout@v3
with:
path: "./laconic-sdk/"
repository: cerc-io/laconic-sdk
fetch-depth: 0
ref: main
- name: Environment
run: ls -tlh && env
- name: Build laconicd container
working-directory: tests/sdk_tests
run: ./build-laconicd-container.sh
- name: Build laconic-sdk container
working-directory: laconic-sdk
run: ./scripts/build-sdk-test-container.sh
- name: Start containers (auctions enabled)
working-directory: tests/sdk_tests
env:
TEST_AUCTION_ENABLED: true
run: docker compose up -d
- name: Run auction tests
working-directory: tests/sdk_tests
run: ./run-tests.sh test:auctions

View File

@ -1,40 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
sdk_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Checkout laconic-sdk
uses: actions/checkout@v3
with:
path: "./laconic-sdk/"
repository: cerc-io/laconic-sdk
fetch-depth: 0
ref: main
- name: Environment
run: ls -tlh && env
- name: Build laconicd container
working-directory: tests/sdk_tests
run: ./build-laconicd-container.sh
- name: Build laconic-sdk container
working-directory: laconic-sdk
run: ./scripts/build-sdk-test-container.sh
- name: Start containers (expiry enabled)
working-directory: tests/sdk_tests
env:
TEST_REGISTRY_EXPIRY: true
run: docker compose up -d
- name: Run nameservice expiry tests
working-directory: tests/sdk_tests
run: ./run-tests.sh test:nameservice-expiry

View File

@ -1,38 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
sdk_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Checkout laconic-sdk
uses: actions/checkout@v3
with:
path: "./laconic-sdk/"
repository: cerc-io/laconic-sdk
fetch-depth: 0
ref: main
- name: Environment
run: ls -tlh && env
- name: Build laconicd container
working-directory: tests/sdk_tests
run: ./build-laconicd-container.sh
- name: Build laconic-sdk container
working-directory: laconic-sdk
run: ./scripts/build-sdk-test-container.sh
- name: Start containers
working-directory: tests/sdk_tests
run: docker compose up -d
- name: Run tests
working-directory: tests/sdk_tests
run: ./run-tests.sh

View File

@ -1,23 +0,0 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
test-unit:
# This test case doesn't work in CI, run as root.
env:
SKIP_UNIT_TESTS: TestInitConfigNonNotExistError
runs-on: ubuntu-latest
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.21
check-latest: true
- uses: actions/checkout@v3
- name: Test
run: |
make test-unit

5
.github/CODEOWNERS vendored Normal file
View File

@ -0,0 +1,5 @@
# CODEOWNERS: https://help.github.com/articles/about-codeowners/
# Primary (global) repo maintainers
* @evmos/core-engineering

18
.github/ISSUE_TEMPLATE/bug-report.md vendored Normal file
View File

@ -0,0 +1,18 @@
---
name: Bug Report
about: create a bug report
---
__System info:__ [Include Ethermint commit, operating system name, and other relevant details]
__Steps to reproduce:__
1. [First Step]
2. [Second Step]
3. [and so on...]
__Expected behavior:__ [What you expected to happen]
__Actual behavior:__ [What actually happened]
__Additional info:__ [Include gist of relevant config, logs, etc.]

View File

@ -0,0 +1,15 @@
---
name: Feature request
about: Opening a feature request kicks off a discussion
---
__Proposal:__ [Description of the feature]
__Current behavior:__ [What currently happens]
__Desired behavior:__ [What you would like to happen]
__Use case:__ [Why is this important (helps with prioritizing requests)]
Requests may be closed if we're not actively planning to work on them.

34
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View File

@ -0,0 +1,34 @@
<!-- < < < < < < < < < < < < < < < < < < < < < < < < < < < < < < < < < ☺
v ✰ Thanks for creating a PR! ✰
v Before smashing the submit button please review the checkboxes.
v If a checkbox is n/a - please still include it but + a little note why
☺ > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > -->
Closes: #XXX
## Description
<!-- Add a description of the changes that this PR introduces and the files that
are the most critical to review.
-->
______
For contributor use:
- [ ] Targeted PR against correct branch (see [CONTRIBUTING.md](https://github.com/cosmos/cosmos-sdk/blob/master/CONTRIBUTING.md#pr-targeting))
- [ ] Linked to Github issue with discussion and accepted design OR link to spec that describes this work.
- [ ] Code follows the [module structure standards](https://github.com/cosmos/cosmos-sdk/blob/master/docs/building-modules/structure.md).
- [ ] Wrote unit and integration [tests](https://github.com/cosmos/cosmos-sdk/blob/master/CONTRIBUTING.md#testing)
- [ ] Updated relevant documentation (`docs/`) or specification (`x/<module>/spec/`)
- [ ] Added relevant `godoc` [comments](https://blog.golang.org/godoc-documenting-go-code).
- [ ] Added a relevant changelog entry to the `Unreleased` section in `CHANGELOG.md`
- [ ] Re-reviewed `Files changed` in the Github PR explorer
______
For admin use:
- [ ] Added appropriate labels to PR (ex. `WIP`, `R4R`, `docs`, etc)
- [ ] Reviewers assigned
- [ ] Squashed all commits, uses message "Merge pull request #XYZ: [title]" ([coding standards](https://github.com/tendermint/coding/blob/master/README.md#merging-a-pr))

37
.github/labeler.yml vendored Normal file
View File

@ -0,0 +1,37 @@
"C:Crypto":
- crypto/**/*
"C:Encoding":
- encoding/**/*
"C:JSON-RPC":
- ethereum/rpc/**/*
"C:Proto":
- proto/**/*
- third_party/**/*
- /**/*.pb.go
- /**/*.pb.gw.go
"C:Types":
- types/**/*
"C:x/evm":
- x/evm/**/*/
"Type: Build":
- Makefile
- Dockerfile
- docker-compose.yml
- scripts/*
- config.yml
"Type: CI":
- .github/**/*.yml
- buf.yaml
- .mergify.yml
- .golangci.yml
"C:CLI":
- client/**/*
- x/*/client/**/*
"Type: Tests":
- tests/**/*
- /**/*/*_test.go
"Type: Docs":
- docs/**/*
- x/*/spec/**/*
"Type: ADR":
- docs/architecture/**/*

33
.github/workflows/build.yml vendored Normal file
View File

@ -0,0 +1,33 @@
name: Build
on:
pull_request:
branches:
- main
jobs:
cleanup-runs:
runs-on: ubuntu-latest
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'"
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: 1.19
check-latest: true
- uses: technote-space/get-diff-action@v6.1.2
id: git_diff
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- run: |
make build
if: env.GIT_DIFF

84
.github/workflows/codeql-analysis.yml vendored Normal file
View File

@ -0,0 +1,84 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ main ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: '37 21 * * 4'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'go' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/**.sol
**/**.go
**/**.ts
**/**.js
go.mod
go.sum
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
queries: crypto-com/cosmos-sdk-codeql@main,security-and-quality
if: env.GIT_DIFF
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
if: env.GIT_DIFF
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
if: env.GIT_DIFF

28
.github/workflows/dependencies.yml vendored Normal file
View File

@ -0,0 +1,28 @@
name: "Dependency Review"
on: pull_request
permissions:
contents: read
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19
check-latest: true
- name: "Checkout Repository"
uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- name: "Dependency Review"
uses: actions/dependency-review-action@v3
if: env.GIT_DIFF
- name: "Go vulnerability check"
run: make vulncheck
if: env.GIT_DIFF

38
.github/workflows/deploy-contract.yml vendored Normal file
View File

@ -0,0 +1,38 @@
name: Deploy Contract
on:
pull_request:
branches:
- main
jobs:
cleanup-runs:
runs-on: ubuntu-latest
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'"
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: '12.x'
- name: Install dependencies
run: npm install
- uses: technote-space/get-diff-action@v6.1.2
id: git_diff
with:
PATTERNS: |
**/**.sol
**/**.go
go.mod
go.sum
- name: Test contract
run: |
sudo make contract-tools
sudo make test-contract
if: env.GIT_DIFF

View File

@@ -20,7 +20,7 @@ jobs:
       - name: Tag docker image
         run: docker tag git.vdb.to/cerc-io/laconicd/laconicd:${{steps.vars.outputs.sha}} git.vdb.to/cerc-io/laconicd/laconicd:${{steps.vars.outputs.tag}}
       - name: Docker Login
-        run: echo ${{ secrets.CICD_PUBLISH_TOKEN }} | docker login https://git.vdb.to -u cerccicd --password-stdin
+        run: echo ${{ secrets.GITEA_TOKEN }} | docker login https://git.vdb.to -u cerccicd --password-stdin
       - name: Docker Push
         run: docker push git.vdb.to/cerc-io/laconicd/laconicd:${{steps.vars.outputs.sha}}
       - name: Docker Push TAGGED

View File

@@ -15,14 +15,14 @@ jobs:
       - name: Set up Go
         uses: actions/setup-go@v3
         with:
-          go-version: 1.21
+          go-version: 1.19
           check-latest: true
       - name: release dry run
         run: make release-dry-run
       - name: setup release environment
         env:
-          GITHUB_TOKEN: ${{ secrets.CICD_PUBLISH_TOKEN }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |-
-          echo 'GITHUB_TOKEN=${{secrets.CICD_PUBLISH_TOKEN}}' > .release-env
+          echo 'GITHUB_TOKEN=${{secrets.GITHUB_TOKEN}}' > .release-env
       - name: release publish
         run: make release

View File

@ -0,0 +1,29 @@
name: Notion Sync
on:
workflow_dispatch:
issues:
types:
[
opened,
edited,
labeled,
unlabeled,
assigned,
unassigned,
milestoned,
demilestoned,
reopened,
closed,
]
jobs:
notion_job:
runs-on: ubuntu-latest
name: Add GitHub Issues to Notion
steps:
- name: Add GitHub Issues to Notion
uses: vulcanize/notion-github-action@v1.2.4-issueid
with:
notion-token: ${{ secrets.NOTION_TOKEN }}
notion-db: ${{ secrets.NOTION_DATABASE }}

14
.github/workflows/labeler.yml vendored Normal file
View File

@ -0,0 +1,14 @@
name: "Pull Request Labeler"
on:
pull_request:
push:
branches:
- main
jobs:
triage:
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v4
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"

View File

@@ -16,27 +16,37 @@ jobs:
       # Required: setup-go, for all versions v3.0.0+ of golangci-lint
       - uses: actions/setup-go@v3
         with:
-          go-version: 1.21
+          go-version: 1.19
           check-latest: true
       - uses: actions/checkout@v3
+      - uses: technote-space/get-diff-action@v6.1.2
+        with:
+          PATTERNS: |
+            **/**.go
+            go.mod
+            go.sum
       - uses: golangci/golangci-lint-action@v3.3.1
         with:
           # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
           version: latest
           args: --timeout 10m
           github-token: ${{ secrets.github_token }}
+        # Check only if there are differences in the source code
+        if: env.GIT_DIFF
   python-lint:
-    # For compatibility with Gitea
-    env:
-      USER: root
     name: Run flake8 on python integration tests
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - uses: cachix/install-nix-action@v20
+      - uses: cachix/install-nix-action@v18
       - uses: cachix/cachix-action@v12
         with:
           name: ethermint
+      - uses: technote-space/get-diff-action@v6.1.2
+        with:
+          PATTERNS: |
+            **/**.py
       - run: |
           nix-shell -I nixpkgs=./nix -p test-env --run "make lint-py"
+        if: env.GIT_DIFF

37
.github/workflows/security.yml vendored Normal file
View File

@ -0,0 +1,37 @@
name: Run Gosec
on:
pull_request:
push:
branches:
- main
jobs:
Gosec:
permissions:
security-events: write
runs-on: ubuntu-latest
env:
GO111MODULE: on
steps:
- name: Checkout Source
uses: actions/checkout@v3
- name: Get Diff
uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/*.go
go.mod
go.sum
- name: Run Gosec Security Scanner
uses: cosmos/gosec@master
with:
# we let the report trigger content trigger a failure using the GitHub Security features.
args: '-no-fail -fmt sarif -out results.sarif -exclude=G701,G703 ./...'
if: "env.GIT_DIFF_FILTERED != ''"
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@v2
with:
# Path to SARIF file relative to the root of the repository
sarif_file: results.sarif
if: "env.GIT_DIFF_FILTERED != ''"

45
.github/workflows/semgrep.yml vendored Normal file
View File

@ -0,0 +1,45 @@
name: Semgrep
on:
# Scan changed files in PRs, block on new issues only (existing issues ignored)
pull_request: {}
push:
branches:
- main
paths:
- .github/workflows/semgrep.yml
schedule:
- cron: '0 0 * * 0'
jobs:
# Update from: https://semgrep.dev/docs/semgrep-ci/sample-ci-configs/#github-actions
semgrep:
name: Scan
runs-on: ubuntu-latest
container:
image: returntocorp/semgrep
if: (github.actor != 'dependabot[bot]')
steps:
- name: Permission issue fix
run: git config --global --add safe.directory /__w/laconicd/laconicd
- uses: actions/checkout@v3
- name: Get Diff
uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/*.go
**/*.js
**/*.ts
**/*.sol
go.mod
go.sum
- uses: actions/checkout@v3
- run: semgrep scan --sarif --output=semgrep.sarif --config auto
env:
# Upload findings to GitHub Advanced Security Dashboard [step 1/2]
SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
if: "env.GIT_DIFF_FILTERED != ''"
# Upload findings to GitHub Advanced Security Dashboard [step 2/2]
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: semgrep.sarif
if: "env.GIT_DIFF_FILTERED != ''"

21
.github/workflows/stale.yml vendored Normal file
View File

@ -0,0 +1,21 @@
name: "Close stale issues & pull requests"
on:
schedule:
- cron: "0 0 * * *"
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v6
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-pr-message: "This pull request has been automatically marked as stale because it has not had
recent activity. It will be closed in 7 days-before-close if no further activity occurs."
stale-issue-message: "This issue is stale because it has been open 45 days with no activity. Remove `Status: Stale` label or comment or this will be closed in 7 days."
days-before-stale: 45
days-before-close: 7
exempt-issue-labels: "Status: Blocked, Type: Bug, pinned, automerge"
exempt-pr-labels: "Status: Blocked, Type: Bug, pinned, automerge"
stale-pr-label: "Status: Stale"
stale-issue-label: "Status: Stale"

176
.github/workflows/test.yml vendored Normal file
View File

@ -0,0 +1,176 @@
name: Tests
on:
pull_request:
push:
branches:
- main
- release/**
jobs:
cleanup-runs:
runs-on: ubuntu-latest
steps:
- uses: rokroskar/workflow-run-cleanup-action@master
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'"
test-unit-cover:
runs-on: ubuntu-latest
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19
check-latest: true
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- name: Test and Create Coverage Report
run: |
make test-unit-cover
if: env.GIT_DIFF
- uses: codecov/codecov-action@v3
with:
file: ./coverage.txt
fail_ci_if_error: false
if: env.GIT_DIFF
test-importer:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19
check-latest: true
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6.1.2
id: git_diff
with:
PATTERNS: |
**/**.go
go.mod
go.sum
- name: test-importer
run: |
make test-import
if: env.GIT_DIFF
test-rpc:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- uses: actions/setup-go@v3
with:
go-version: 1.19
check-latest: true
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6.1.2
with:
PATTERNS: |
**/**.sol
**/**.go
go.mod
go.sum
- name: Test rpc endpoint
run: |
make test-rpc
if: env.GIT_DIFF
sdk_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Checkout laconic-sdk
uses: actions/checkout@v3
with:
path: "./laconic-sdk/"
repository: cerc-io/laconic-sdk
fetch-depth: 0
ref: jest_timeout
- name: Environment
run: ls -tlh && env
- name: build containers scripts
working-directory: tests/sdk_tests
run: ./build-laconicd-container.sh && ./build-sdk-test-container.sh
- name: start containers
working-directory: tests/sdk_tests
run: docker compose up -d
- name: run-tests.sh
working-directory: tests/sdk_tests
run: ./run-tests.sh
- name: reset containers for auction tests
working-directory: tests/sdk_tests
if: always()
run: docker compose down
- name: start auction containers
working-directory: tests/sdk_tests
run: docker compose -f docker-compose-auctions.yml up -d
- name: run-acution-tests.sh
working-directory: tests/sdk_tests
run: ./run-auction-tests.sh
- name: reset containers for nameservice tests
working-directory: tests/sdk_tests
if: always()
run: docker compose -f docker-compose-auctions.yml down
- name: start auction containers
working-directory: tests/sdk_tests
run: docker compose -f docker-compose-nameservice.yml up -d
- name: run-nameservice-expiry-tests.sh
working-directory: tests/sdk_tests
run: ./run-nameservice-expiry-tests.sh
- name: reset containers for nameservice tests
working-directory: tests/sdk_tests
if: always()
run: docker compose -f docker-compose-nameservice.yml down
# integration_tests:
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v2
# - uses: cachix/install-nix-action@v18
# - uses: cachix/cachix-action@v12
# with:
# name: ethermint
# signingKey: "${{ secrets.CACHIX_SIGNING_KEY }}"
# - uses: technote-space/get-diff-action@v6.1.2
# with:
# PATTERNS: |
# **/**.sol
# **/**.go
# go.mod
# go.sum
# tests/integration_tests/**
# - name: Run integration tests
# run: make run-integration-tests
# if: env.GIT_DIFF
# - name: 'Tar debug files'
# if: failure()
# run: tar cfz debug_files.tar.gz -C /tmp/pytest-of-runner .
# - uses: actions/upload-artifact@v3
# if: failure()
# with:
# name: debug-files
# path: debug_files.tar.gz
# if-no-files-found: ignore
# upload-cache:
# if: github.event_name == 'push'
# needs: ["integration_tests"]
# strategy:
# matrix:
# os: [macos-latest]
# runs-on: ${{ matrix.os }}
# steps:
# - uses: actions/checkout@v2
# - uses: cachix/install-nix-action@v18
# - uses: cachix/cachix-action@v12
# with:
# name: ethermint
# signingKey: "${{ secrets.CACHIX_SIGNING_KEY }}"
# - name: 'instantiate integration test env'
# run: nix-store -r "$(nix-instantiate tests/integration_tests/shell.nix)"

View File

@@ -6,13 +6,14 @@ run:
 linters:
   enable:
     - bodyclose
-    # - depguard # 20231120 disable until https://github.com/golangci/golangci-lint/issues/3906 is released
+    - depguard
     - dogsled
+    - dupl
     - errcheck
     - goconst
     - gocritic
     - gofumpt
-    # - revive # 20231120 overly sensitive unused detection
+    - revive
     - gosec
     - gosimple
     - govet

View File

@@ -19,9 +19,6 @@ test/
 tests/
 *_test.go
-# false positive; TODO: https://github.com/cerc-io/laconicd/issues/104
-testutil/network/network.go
 # Semgrep rules folder
 .semgrep

View File

@@ -1,14 +1,14 @@
 FROM golang:alpine AS build-env
-# Install dependencies
-RUN apk add --update git build-base linux-headers
+# Set up dependencies
+ENV PACKAGES git build-base
 # Set working directory for the build
 WORKDIR /go/src/github.com/cerc-io/laconicd
-# Cache Go modules
-COPY go.mod go.sum ./
-RUN go mod download
+# Install dependencies
+RUN apk add --update $PACKAGES
+RUN apk add linux-headers
 # Add source files
 COPY . .
@@ -21,10 +21,10 @@ FROM alpine:3.17.0
 # Install ca-certificates
 RUN apk add --update ca-certificates jq curl
-WORKDIR /
 # Copy over binaries from the build-env
 COPY --from=build-env /go/src/github.com/cerc-io/laconicd/build/laconicd /usr/bin/laconicd
+WORKDIR /
 # Run laconicd by default
 CMD ["laconicd"]

View File

@@ -72,7 +72,7 @@ ifeq ($(ENABLE_ROCKSDB),true)
   BUILD_TAGS += rocksdb_build
   test_tags += rocksdb_build
 else
-  $(info RocksDB support is disabled; to build and test with RocksDB support, set ENABLE_ROCKSDB=true)
+  $(warning RocksDB support is disabled; to build and test with RocksDB support, set ENABLE_ROCKSDB=true)
 endif
 # DB backend selection
@@ -145,7 +145,7 @@ docker-build:
 	# update old container
 	docker rm laconicd || true
 	# create a new container from the latest image
-	docker create --name laconic -t -i ${DOCKER_IMAGE}:${DOCKER_TAG} laconicd
+	docker create --name laconic -t -i cerc-io/laconicd:latest laconicd
 	# move the binaries to the ./build directory
 	mkdir -p ./build/
 	docker cp laconic:/usr/bin/laconicd ./build/
@@ -237,7 +237,7 @@ endif
 ifeq (, $(shell which go-bindata))
 	@echo "Installing go-bindata..."
-	@go get github.com/kevinburke/go-bindata/go-bindata@v3
+	@go get github.com/kevinburke/go-bindata/go-bindata
 else
 	@echo "go-bindata already installed; skipping..."
 endif
@@ -316,7 +316,7 @@ TEST_TARGETS := test-unit test-unit-cover test-race
 # Test runs-specific rules. To add a new test target, just add
 # a new rule, customise ARGS or TEST_PACKAGES ad libitum, and
 # append the new rule to the TEST_TARGETS list.
-test-unit: ARGS=-timeout=10m -race -test.v -skip $(SKIP_UNIT_TESTS)
+test-unit: ARGS=-timeout=10m -race
 test-unit: TEST_PACKAGES=$(PACKAGES_UNIT)
 test-race: ARGS=-race
@@ -334,10 +334,10 @@ else
 endif
 test-import:
-	go test -run TestImporterTestSuite -timeout=20m -v --vet=off github.com/cerc-io/laconicd/tests/importer
+	go test -run TestImporterTestSuite -v --vet=off github.com/cerc-io/laconicd/tests/importer
 test-rpc:
-	./scripts/integration-test-all.sh -t "rpc" -q 1 -z 1 -s 10 -m "rpc" -r "true"
+	./scripts/integration-test-all.sh -t "rpc" -q 1 -z 1 -s 2 -m "rpc" -r "true"
 run-integration-tests:
 	@nix-shell ./tests/integration_tests/shell.nix --run ./scripts/run-integration-tests.sh
@@ -389,7 +389,7 @@ format-fix:
 ###############################################################################
 # ------
 # NOTE: Link to the tendermintdev/sdk-proto-gen docker images:
 # https://hub.docker.com/r/tendermintdev/sdk-proto-gen/tags
 #
 protoVer=v0.7

View File

@ -2,7 +2,6 @@ package eip712
import ( import (
"bytes" "bytes"
"encoding/base64"
"encoding/json" "encoding/json"
"fmt" "fmt"
"math/big" "math/big"
@ -10,22 +9,22 @@ import (
"strings" "strings"
"time" "time"
errorsmod "cosmossdk.io/errors"
sdkmath "cosmossdk.io/math" sdkmath "cosmossdk.io/math"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
sdk "github.com/cosmos/cosmos-sdk/types"
errortypes "github.com/cosmos/cosmos-sdk/types/errors"
"golang.org/x/text/cases" "golang.org/x/text/cases"
"golang.org/x/text/language" "golang.org/x/text/language"
errorsmod "cosmossdk.io/errors"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
errortypes "github.com/cosmos/cosmos-sdk/types/errors"
registry "github.com/cerc-io/laconicd/x/registry/types"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/math" "github.com/ethereum/go-ethereum/common/math"
"github.com/ethereum/go-ethereum/signer/core/apitypes" "github.com/ethereum/go-ethereum/signer/core/apitypes"
) )
const bytesStr = "bytes"
// WrapTxToTypedData is an ultimate method that wraps Amino-encoded Cosmos Tx JSON data // WrapTxToTypedData is an ultimate method that wraps Amino-encoded Cosmos Tx JSON data
// into an EIP712-compatible TypedData request. // into an EIP712-compatible TypedData request.
func WrapTxToTypedData( func WrapTxToTypedData(
@ -35,6 +34,25 @@ func WrapTxToTypedData(
data []byte, data []byte,
feeDelegation *FeeDelegationOptions, feeDelegation *FeeDelegationOptions,
) (apitypes.TypedData, error) { ) (apitypes.TypedData, error) {
txData := make(map[string]interface{})
if err := json.Unmarshal(data, &txData); err != nil {
return apitypes.TypedData{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "failed to JSON unmarshal data")
}
if txData["msgs"].([]interface{})[0].(map[string]interface{})["value"].(map[string]interface{})["payload"] != nil {
setRecordMsg := msg.(*registry.MsgSetRecord)
var attr []interface{}
for _, b := range setRecordMsg.Payload.Record.Attributes.Value {
attr = append(attr, fmt.Sprintf("%v", b))
}
txData["msgs"].([]interface{})[0].(map[string]interface{})["value"].(map[string]interface{})["payload"].(map[string]interface{})["record"].(map[string]interface{})["attributes"] = map[string]interface{}{ //nolint:lll
"type_url": setRecordMsg.Payload.Record.Attributes.TypeUrl,
"value": attr,
}
}
domain := apitypes.TypedDataDomain{ domain := apitypes.TypedDataDomain{
Name: "Cosmos Web3", Name: "Cosmos Web3",
Version: "1.0.0", Version: "1.0.0",
@ -48,13 +66,22 @@ func WrapTxToTypedData(
return apitypes.TypedData{}, err return apitypes.TypedData{}, err
} }
txData := make(map[string]interface{}) if msgTypes["TypePayloadRecord"] != nil {
if err := json.Unmarshal(data, &txData); err != nil { msgTypes["TypePayloadRecord"] = []apitypes.Type{
return apitypes.TypedData{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "failed to JSON unmarshal data") {Name: "id", Type: "string"},
{Name: "bond_id", Type: "string"},
{Name: "create_time", Type: "string"},
{Name: "expiry_time", Type: "string"},
{Name: "deleted", Type: "bool"},
{Name: "attributes", Type: "TypePayloadRecordAttributes"},
}
} }
if msgTypes["TypePayloadRecordAttributes"] != nil {
if err := patchTxData(txData, msgTypes, "Tx"); err != nil { msgTypes["TypePayloadRecordAttributes"] = []apitypes.Type{
return apitypes.TypedData{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "failed to patch JSON data") {Name: "type_url", Type: "string"},
{Name: "value", Type: "uint8[]"},
}
delete(msgTypes, "TypePayloadRecordAttributesValue")
} }
if feeDelegation != nil { if feeDelegation != nil {
@ -293,15 +320,10 @@ func traverseFields(
ethTyp := typToEth(fieldType) ethTyp := typToEth(fieldType)
if len(ethTyp) > 0 { if len(ethTyp) > 0 {
// Support array of uint64 // Support array of uint64
if isCollection { if isCollection && fieldType.Kind() != reflect.Slice && fieldType.Kind() != reflect.Array {
if fieldType.Kind() != reflect.Slice && fieldType.Kind() != reflect.Array { ethTyp += "[]"
ethTyp += "[]"
}
// convert uint8[] to bytes
if fieldType.Kind() == reflect.Uint8 {
ethTyp = bytesStr
}
} }
if prefix == typeDefPrefix { if prefix == typeDefPrefix {
typeMap[rootType] = append(typeMap[rootType], apitypes.Type{ typeMap[rootType] = append(typeMap[rootType], apitypes.Type{
Name: fieldName, Name: fieldName,
@ -444,13 +466,14 @@ func typToEth(typ reflect.Type) string {
return "uint32" return "uint32"
case reflect.Uint64: case reflect.Uint64:
return "uint64" return "uint64"
case reflect.Slice | reflect.Array: case reflect.Slice:
// Note: this case may never be reached due to previous handling in traverseFields ethName := typToEth(typ.Elem())
if len(ethName) > 0 {
return ethName + "[]"
}
case reflect.Array:
ethName := typToEth(typ.Elem()) ethName := typToEth(typ.Elem())
if len(ethName) > 0 { if len(ethName) > 0 {
if ethName == "uint8" {
return bytesStr
}
return ethName + "[]" return ethName + "[]"
} }
case reflect.Ptr: case reflect.Ptr:
@ -487,77 +510,3 @@ func doRecover(err *error) {
*err = fmt.Errorf("%v", r) *err = fmt.Errorf("%v", r)
} }
} }
// Performs extra type conversions on JSON-decoded data accoding to the provided type definitions
// for compatibility with Geth's encoding
func patchTxData(data map[string]any, schema apitypes.Types, rootType string) error {
// Scan the data for any types that need to be converted.
// This is adapted from TypedData.EncodeData
for _, field := range schema[rootType] {
encType := field.Type
encValue := data[field.Name]
switch {
case encType[len(encType)-1:] == "]":
arrayValue, ok := encValue.([]interface{})
if !ok {
return dataMismatchError(encType, encValue)
}
parsedType := strings.Split(encType, "[")[0]
if schema[parsedType] != nil {
for _, item := range arrayValue {
mapValue, ok := item.(map[string]interface{})
if !ok {
return dataMismatchError(parsedType, item)
}
err := patchTxData(mapValue, schema, parsedType)
if err != nil {
return err
}
}
} else {
for i, item := range arrayValue {
converted, err := handleConversion(parsedType, item)
if err != nil {
return err
}
arrayValue[i] = converted
}
}
case schema[encType] != nil:
mapValue, ok := encValue.(map[string]interface{})
if !ok {
return dataMismatchError(encType, encValue)
}
err := patchTxData(mapValue, schema, encType)
if err != nil {
return err
}
default:
converted, err := handleConversion(encType, encValue)
if err != nil {
return err
}
data[field.Name] = converted
}
}
return nil
}
func handleConversion(encType string, encValue any) (any, error) {
if encType == bytesStr {
// Protobuf encodes byte strings in base64
if v, ok := encValue.(string); ok {
return base64.StdEncoding.DecodeString(v)
}
}
return encValue, nil
}
// dataMismatchError generates an error for a mismatch between
// the provided type and data
func dataMismatchError(encType string, encValue any) error {
return fmt.Errorf("provided data '%v' doesn't match type '%s'", encValue, encType)
}

View File

@ -4,25 +4,28 @@ import (
"testing" "testing"
"cosmossdk.io/math" "cosmossdk.io/math"
registrytypes "github.com/cerc-io/laconicd/x/registry/types"
"github.com/cerc-io/laconicd/ethereum/eip712"
"github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/simapp/params" "github.com/cosmos/cosmos-sdk/simapp/params"
"github.com/ethereum/go-ethereum/crypto"
"github.com/cerc-io/laconicd/crypto/ethsecp256k1"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
sdk "github.com/cosmos/cosmos-sdk/types" sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cerc-io/laconicd/app"
"github.com/cerc-io/laconicd/encoding"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
txtypes "github.com/cosmos/cosmos-sdk/types/tx" txtypes "github.com/cosmos/cosmos-sdk/types/tx"
"github.com/cosmos/cosmos-sdk/types/tx/signing" "github.com/cosmos/cosmos-sdk/types/tx/signing"
authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing" authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types" distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types"
govtypes "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1" govtypes "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/cerc-io/laconicd/app"
"github.com/cerc-io/laconicd/crypto/ethsecp256k1"
"github.com/cerc-io/laconicd/encoding"
"github.com/cerc-io/laconicd/ethereum/eip712"
) )
// Unit tests for single-signer EIP-712 signature verification. Multi-signer verification tests are included // Unit tests for single-signer EIP-712 signature verification. Multi-signer verification tests are included
@ -329,54 +332,6 @@ func (suite *EIP712TestSuite) TestEIP712SignatureVerification() {
sequence: 78, sequence: 78,
expectSuccess: false, expectSuccess: false,
}, },
// test laconic registry messages
{
title: "Succeeds - Standard MsgSetName",
fee: txtypes.Fee{
Amount: suite.makeCoins("aphoton", math.NewInt(2000)),
GasLimit: 100000,
},
memo: "",
msgs: []sdk.Msg{
registrytypes.NewMsgSetName(
"testcrn",
"testcid",
suite.createTestAddress(),
),
},
accountNumber: 25,
sequence: 78,
expectSuccess: true,
},
{
title: "Succeeds - Standard MsgSetRecord",
fee: txtypes.Fee{
Amount: suite.makeCoins("aphoton", math.NewInt(2000)),
GasLimit: 100000,
},
memo: "",
msgs: []sdk.Msg{
registrytypes.NewMsgSetRecord(
registrytypes.Payload{
Record: &registrytypes.Record{
Attributes: []byte("test attributes"),
},
Signatures: []registrytypes.Signature{
{
Sig: "fake sig",
PubKey: "fake pubkey",
},
},
},
"testbondid",
suite.createTestAddress(),
),
},
accountNumber: 25,
sequence: 78,
expectSuccess: true,
},
} }
for _, tc := range testCases { for _, tc := range testCases {

5
go.mod
View File

@@ -27,7 +27,7 @@ require (
 	github.com/holiman/uint256 v1.2.1
 	github.com/improbable-eng/grpc-web v0.15.0
 	github.com/ipfs/go-cid v0.3.2
-	github.com/ipld/go-ipld-prime v0.19.0
+	github.com/ipld/go-ipld-prime v0.18.0
 	github.com/miguelmota/go-ethereum-hdwallet v0.1.1
 	github.com/onsi/ginkgo/v2 v2.5.1
 	github.com/onsi/gomega v1.24.1
@@ -134,7 +134,6 @@ require (
 	github.com/jackpal/go-nat-pmp v1.0.2 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/jmhodges/levigo v1.0.0 // indirect
-	github.com/kevinburke/go-bindata v3.24.0+incompatible // indirect
 	github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d // indirect
 	github.com/klauspost/compress v1.15.11 // indirect
 	github.com/klauspost/cpuid/v2 v2.0.9 // indirect
@@ -157,7 +156,7 @@ require (
 	github.com/multiformats/go-base32 v0.0.3 // indirect
 	github.com/multiformats/go-base36 v0.1.0 // indirect
 	github.com/multiformats/go-multibase v0.0.3 // indirect
-	github.com/multiformats/go-multihash v0.2.1 // indirect
+	github.com/multiformats/go-multihash v0.2.0 // indirect
 	github.com/multiformats/go-varint v0.0.6 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
 	github.com/pelletier/go-toml v1.9.5 // indirect

16
go.sum
View File

@@ -798,8 +798,8 @@ github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9/go.mod h1:Js0mq
 github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368/go.mod h1:Wbbw6tYNvwa5dlB6304Sd+82Z3f7PmVZHVKU637d4po=
 github.com/ipfs/go-cid v0.3.2 h1:OGgOd+JCFM+y1DjWPmVH+2/4POtpDzwcr7VgnB7mZXc=
 github.com/ipfs/go-cid v0.3.2/go.mod h1:gQ8pKqT/sUxGY+tIwy1RPpAojYu7jAyCp5Tz1svoupw=
-github.com/ipld/go-ipld-prime v0.19.0 h1:5axC7rJmPc17Emw6TelxGwnzALk0PdupZ2oj2roDj04=
-github.com/ipld/go-ipld-prime v0.19.0/go.mod h1:Q9j3BaVXwaA3o5JUDNvptDDr/x8+F7FG6XJ8WI3ILg4=
+github.com/ipld/go-ipld-prime v0.18.0 h1:xUk7NUBSWHEXdjiOu2sLXouFJOMs0yoYzeI5RAqhYQo=
+github.com/ipld/go-ipld-prime v0.18.0/go.mod h1:735yXW548CKrLwVCYXzqx90p5deRJMVVxM9eJ4Qe+qE=
 github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc=
 github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus=
 github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc=
@@ -836,12 +836,6 @@ github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E
 github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef/go.mod h1:Ct9fl0F6iIOGgxJ5npU/IUOhOhqlVrGjyIZc8/MagT0=
 github.com/karalabe/usb v0.0.0-20190919080040-51dc0efba356/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
 github.com/karalabe/usb v0.0.2/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
-github.com/kevinburke/go-bindata v1.1.0 h1:JZ8XIdxtkAszuYkgf17qwQzcU/RgsGSVLSULJdN+5bU=
-github.com/kevinburke/go-bindata v1.1.0/go.mod h1:UJ72WTOoRKzbz+vwtKgARJwsy+1ZGUukVBUCc197TJo=
-github.com/kevinburke/go-bindata v3.22.0+incompatible h1:/JmqEhIWQ7GRScV0WjX/0tqBrC5D21ALg0H0U/KZ/ts=
-github.com/kevinburke/go-bindata v3.22.0+incompatible/go.mod h1:/pEEZ72flUW2p0yi30bslSp9YqD9pysLxunQDdb2CPM=
-github.com/kevinburke/go-bindata v3.24.0+incompatible h1:qajFA3D0pH94OTLU4zcCCKCDgR+Zr2cZK/RPJHDdFoY=
-github.com/kevinburke/go-bindata v3.24.0+incompatible/go.mod h1:/pEEZ72flUW2p0yi30bslSp9YqD9pysLxunQDdb2CPM=
 github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM=
 github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d h1:Z+RDyXzjKE0i2sTjZ/b1uxiGtPhFy34Ou/Tk0qwN0kM=
 github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc=
@@ -973,9 +967,9 @@ github.com/multiformats/go-base36 v0.1.0 h1:JR6TyF7JjGd3m6FbLU2cOxhC0Li8z8dLNGQ8
 github.com/multiformats/go-base36 v0.1.0/go.mod h1:kFGE83c6s80PklsHO9sRn2NCoffoRdUUOENyW/Vv6sM=
 github.com/multiformats/go-multibase v0.0.3 h1:l/B6bJDQjvQ5G52jw4QGSYeOTZoAwIO77RblWplfIqk=
 github.com/multiformats/go-multibase v0.0.3/go.mod h1:5+1R4eQrT3PkYZ24C3W2Ue2tPwIdYQD509ZjSb5y9Oc=
-github.com/multiformats/go-multicodec v0.6.0 h1:KhH2kSuCARyuJraYMFxrNO3DqIaYhOdS039kbhgVwpE=
-github.com/multiformats/go-multihash v0.2.1 h1:aem8ZT0VA2nCHHk7bPJ1BjUbHNciqZC/d16Vve9l108=
-github.com/multiformats/go-multihash v0.2.1/go.mod h1:WxoMcYG85AZVQUyRyo9s4wULvW5qrI9vb2Lt6evduFc=
+github.com/multiformats/go-multicodec v0.5.0 h1:EgU6cBe/D7WRwQb1KmnBvU7lrcFGMggZVTPtOW9dDHs=
+github.com/multiformats/go-multihash v0.2.0 h1:oytJb9ZA1OUW0r0f9ea18GiaPOo4SXyc7p2movyUuo4=
+github.com/multiformats/go-multihash v0.2.0/go.mod h1:WxoMcYG85AZVQUyRyo9s4wULvW5qrI9vb2Lt6evduFc=
 github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY=
 github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=

File diff suppressed because it is too large

View File

@@ -1,3 +1,5 @@
+# .gqlgen.yml example
+#
 # Refer to https://gqlgen.com/config/
 # for detailed .gqlgen.yml documentation.
@@ -10,8 +12,3 @@ model:
 resolver:
   filename: resolver.go
   type: Resolver
-models:
-  Link:
-    model:
-      - github.com/cerc-io/laconicd/gql.Link

View File

@ -2,10 +2,6 @@
package gql package gql
type Value interface {
IsValue()
}
type Account struct { type Account struct {
Address string `json:"address"` Address string `json:"address"`
PubKey *string `json:"pubKey"` PubKey *string `json:"pubKey"`
@ -14,17 +10,6 @@ type Account struct {
Balance []*Coin `json:"balance"` Balance []*Coin `json:"balance"`
} }
type ArrayValue struct {
Value []Value `json:"value"`
}
func (ArrayValue) IsValue() {}
type Attribute struct {
Key string `json:"key"`
Value Value `json:"value"`
}
type Auction struct { type Auction struct {
ID string `json:"id"` ID string `json:"id"`
Status string `json:"status"` Status string `json:"status"`
@ -68,52 +53,21 @@ type Bond struct {
Balance []*Coin `json:"balance"` Balance []*Coin `json:"balance"`
} }
type BooleanValue struct {
Value bool `json:"value"`
}
func (BooleanValue) IsValue() {}
type BytesValue struct {
Value string `json:"value"`
}
func (BytesValue) IsValue() {}
type Coin struct { type Coin struct {
Type string `json:"type"` Type string `json:"type"`
Quantity string `json:"quantity"` Quantity string `json:"quantity"`
} }
type FloatValue struct { type KeyValue struct {
Value float64 `json:"value"` Key string `json:"key"`
Value *Value `json:"value"`
} }
func (FloatValue) IsValue() {}
type IntValue struct {
Value int `json:"value"`
}
func (IntValue) IsValue() {}
type KeyValueInput struct { type KeyValueInput struct {
Key string `json:"key"` Key string `json:"key"`
Value *ValueInput `json:"value"` Value *ValueInput `json:"value"`
} }
type LinkValue struct {
Value Link `json:"value"`
}
func (LinkValue) IsValue() {}
type MapValue struct {
Value []*Attribute `json:"value"`
}
func (MapValue) IsValue() {}
type NameRecord struct { type NameRecord struct {
Latest *NameRecordEntry `json:"latest"` Latest *NameRecordEntry `json:"latest"`
History []*NameRecordEntry `json:"history"` History []*NameRecordEntry `json:"history"`
@ -142,14 +96,22 @@ type PeerInfo struct {
} }
type Record struct { type Record struct {
ID string `json:"id"` ID string `json:"id"`
Names []string `json:"names"` Names []string `json:"names"`
BondID string `json:"bondId"` BondID string `json:"bondId"`
CreateTime string `json:"createTime"` CreateTime string `json:"createTime"`
ExpiryTime string `json:"expiryTime"` ExpiryTime string `json:"expiryTime"`
Owners []string `json:"owners"` Owners []string `json:"owners"`
Attributes []*Attribute `json:"attributes"` Attributes []*KeyValue `json:"attributes"`
References []*Record `json:"references"` References []*Record `json:"references"`
}
type Reference struct {
ID string `json:"id"`
}
type ReferenceInput struct {
ID string `json:"id"`
} }
type Status struct { type Status struct {
@ -163,12 +125,6 @@ type Status struct {
DiskUsage string `json:"disk_usage"` DiskUsage string `json:"disk_usage"`
} }
type StringValue struct {
Value string `json:"value"`
}
func (StringValue) IsValue() {}
type SyncInfo struct { type SyncInfo struct {
LatestBlockHash string `json:"latest_block_hash"` LatestBlockHash string `json:"latest_block_hash"`
LatestBlockHeight string `json:"latest_block_height"` LatestBlockHeight string `json:"latest_block_height"`
@ -182,12 +138,23 @@ type ValidatorInfo struct {
ProposerPriority *string `json:"proposer_priority"` ProposerPriority *string `json:"proposer_priority"`
} }
type ValueInput struct { type Value struct {
Int *int `json:"int"` Null *bool `json:"null"`
Float *float64 `json:"float"` Int *int `json:"int"`
String *string `json:"string"` Float *float64 `json:"float"`
Boolean *bool `json:"boolean"` String *string `json:"string"`
Link *Link `json:"link"` Boolean *bool `json:"boolean"`
Array []*ValueInput `json:"array"` JSON *string `json:"json"`
Map []*KeyValueInput `json:"map"` Reference *Reference `json:"reference"`
Values []*Value `json:"values"`
}
type ValueInput struct {
Null *bool `json:"null"`
Int *int `json:"int"`
Float *float64 `json:"float"`
String *string `json:"string"`
Boolean *bool `json:"boolean"`
Reference *ReferenceInput `json:"reference"`
Values []*ValueInput `json:"values"`
} }

View File

@@ -121,7 +121,7 @@ func (q queryResolver) QueryRecords(ctx context.Context, attributes []*KeyValueI
 	res, err := nsQueryClient.ListRecords(
 		context.Background(),
 		&registrytypes.QueryListRecordsRequest{
-			Attributes: toRPCAttributes(attributes),
+			Attributes: parseRequestAttributes(attributes),
 			All: (all != nil && *all),
 		},
 	)

View File

@ -1,33 +0,0 @@
package gql
import (
"context"
"encoding/json"
"fmt"
"io"
)
// Represents an IPLD link. Links are generally but not necessarily implemented as CIDs
type Link string
func (l Link) String() string {
return string(l)
}
// UnmarshalGQLContext implements the graphql.ContextUnmarshaler interface
func (l *Link) UnmarshalGQLContext(_ context.Context, v interface{}) error {
s, ok := v.(string)
if !ok {
return fmt.Errorf("Link must be a string")
}
*l = Link(s)
return nil
}
// MarshalGQLContext implements the graphql.ContextMarshaler interface
func (l Link) MarshalGQLContext(_ context.Context, w io.Writer) error {
encodable := map[string]string{
"/": l.String(),
}
return json.NewEncoder(w).Encode(encodable)
}

View File

@ -2,15 +2,15 @@ package gql
import ( import (
"context" "context"
"fmt" // #nosec G702 "encoding/json"
"fmt"
"reflect" // #nosec G702
"strconv" "strconv"
auctiontypes "github.com/cerc-io/laconicd/x/auction/types" auctiontypes "github.com/cerc-io/laconicd/x/auction/types"
bondtypes "github.com/cerc-io/laconicd/x/bond/types" bondtypes "github.com/cerc-io/laconicd/x/bond/types"
registrytypes "github.com/cerc-io/laconicd/x/registry/types" registrytypes "github.com/cerc-io/laconicd/x/registry/types"
sdk "github.com/cosmos/cosmos-sdk/types" sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/ipld/go-ipld-prime"
"github.com/ipld/go-ipld-prime/codec/dagjson"
) )
// OwnerAttributeName denotes the owner attribute name for a bond. // OwnerAttributeName denotes the owner attribute name for a bond.
@ -61,21 +61,13 @@ func getGQLRecord(ctx context.Context, resolver QueryResolver, record registryty
return nil, nil return nil, nil
} }
node, err := ipld.Decode(record.Attributes, dagjson.Decode) recordType := record.ToRecordType()
if err != nil { attributes, err := getAttributes(&recordType)
return nil, err
}
if node.Kind() != ipld.Kind_Map {
return nil, fmt.Errorf("invalid record attributes")
}
var links []string
attributes, err := resolveIPLDNode(node, &links)
if err != nil { if err != nil {
return nil, err return nil, err
} }
references, err := resolver.GetRecordsByIds(ctx, links) references, err := getReferences(ctx, resolver, &recordType)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -87,96 +79,11 @@ func getGQLRecord(ctx context.Context, resolver QueryResolver, record registryty
ExpiryTime: record.GetExpiryTime(), ExpiryTime: record.GetExpiryTime(),
Owners: record.GetOwners(), Owners: record.GetOwners(),
Names: record.GetNames(), Names: record.GetNames(),
Attributes: attributes.(MapValue).Value, Attributes: attributes,
References: references, References: references,
}, nil }, nil
} }
func resolveIPLDNode(node ipld.Node, links *[]string) (Value, error) {
switch node.Kind() {
case ipld.Kind_Map:
var entries []*Attribute
for itr := node.MapIterator(); !itr.Done(); {
k, v, err := itr.Next()
if err != nil {
return nil, err
}
if k.Kind() != ipld.Kind_String {
return nil, fmt.Errorf("invalid record attribute key type: %s", k.Kind())
}
s, err := k.AsString()
if err != nil {
return nil, err
}
val, err := resolveIPLDNode(v, links)
if err != nil {
return nil, err
}
entries = append(entries, &Attribute{
Key: s,
Value: val,
})
}
return MapValue{entries}, nil
case ipld.Kind_List:
var values []Value
for itr := node.ListIterator(); !itr.Done(); {
_, v, err := itr.Next()
if err != nil {
return nil, err
}
val, err := resolveIPLDNode(v, links)
if err != nil {
return nil, err
}
values = append(values, val)
}
return ArrayValue{values}, nil
case ipld.Kind_Null:
return nil, nil
case ipld.Kind_Bool:
val, err := node.AsBool()
if err != nil {
return nil, err
}
return BooleanValue{val}, nil
case ipld.Kind_Int:
val, err := node.AsInt()
if err != nil {
return nil, err
}
// TODO: handle bigger ints
return IntValue{int(val)}, nil
case ipld.Kind_Float:
val, err := node.AsFloat()
if err != nil {
return nil, err
}
return FloatValue{val}, nil
case ipld.Kind_String:
val, err := node.AsString()
if err != nil {
return nil, err
}
return StringValue{val}, nil
case ipld.Kind_Bytes:
val, err := node.AsBytes()
if err != nil {
return nil, err
}
return BytesValue{string(val)}, nil
case ipld.Kind_Link:
val, err := node.AsLink()
if err != nil {
return nil, err
}
*links = append(*links, val.String())
return LinkValue{Link(val.String())}, nil
default:
return nil, fmt.Errorf("invalid node kind")
}
}
func getGQLNameRecord(record *registrytypes.NameRecord) (*NameRecord, error) { func getGQLNameRecord(record *registrytypes.NameRecord) (*NameRecord, error) {
if record == nil { if record == nil {
return nil, fmt.Errorf("got nil record") return nil, fmt.Errorf("got nil record")
@ -256,47 +163,136 @@ func GetGQLAuction(auction *auctiontypes.Auction, bids []*auctiontypes.Bid) (*Au
return &gqlAuction, nil return &gqlAuction, nil
} }
func toRPCValue(value *ValueInput) *registrytypes.QueryListRecordsRequest_ValueInput { func getReferences(ctx context.Context, resolver QueryResolver, r *registrytypes.RecordType) ([]*Record, error) {
var rpcval registrytypes.QueryListRecordsRequest_ValueInput var ids []string
switch { // #nosec G705
case value == nil: for key := range r.Attributes {
return nil //nolint: all
case value.Int != nil: switch r.Attributes[key].(type) {
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Int{Int: int64(*value.Int)} case interface{}:
case value.Float != nil: if obj, ok := r.Attributes[key].(map[string]interface{}); ok {
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Float{Float: *value.Float} if _, ok := obj["/"]; ok && len(obj) == 1 {
case value.String != nil: if _, ok := obj["/"].(string); ok {
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_String_{String_: *value.String} ids = append(ids, obj["/"].(string))
case value.Boolean != nil: }
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Boolean{Boolean: *value.Boolean} }
case value.Link != nil: }
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Link{Link: value.Link.String()}
case value.Array != nil:
var contents registrytypes.QueryListRecordsRequest_ArrayInput
for _, val := range value.Array {
contents.Values = append(contents.Values, toRPCValue(val))
} }
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Array{Array: &contents}
case value.Map != nil:
var contents registrytypes.QueryListRecordsRequest_MapInput
for _, kv := range value.Map {
contents.Values[kv.Key] = toRPCValue(kv.Value)
}
rpcval.Value = &registrytypes.QueryListRecordsRequest_ValueInput_Map{Map: &contents}
} }
return &rpcval
return resolver.GetRecordsByIds(ctx, ids)
} }
func toRPCAttributes(attrs []*KeyValueInput) []*registrytypes.QueryListRecordsRequest_KeyValueInput { func getAttributes(r *registrytypes.RecordType) ([]*KeyValue, error) {
return mapToKeyValuePairs(r.Attributes)
}
func mapToKeyValuePairs(attrs map[string]interface{}) ([]*KeyValue, error) {
kvPairs := []*KeyValue{}
trueVal := true
falseVal := false
// #nosec G705
for key, value := range attrs {
kvPair := &KeyValue{
Key: key,
Value: &Value{},
}
switch val := value.(type) {
case nil:
kvPair.Value.Null = &trueVal
case int:
kvPair.Value.Int = &val
case float64:
kvPair.Value.Float = &val
case string:
kvPair.Value.String = &val
case bool:
kvPair.Value.Boolean = &val
case interface{}:
if obj, ok := value.(map[string]interface{}); ok {
if _, ok := obj["/"]; ok && len(obj) == 1 {
if _, ok := obj["/"].(string); ok {
kvPair.Value.Reference = &Reference{
ID: obj["/"].(string),
}
}
} else {
bytes, err := json.Marshal(obj)
if err != nil {
return nil, err
}
jsonStr := string(bytes)
kvPair.Value.JSON = &jsonStr
}
}
}
if kvPair.Value.Null == nil {
kvPair.Value.Null = &falseVal
}
valueType := reflect.ValueOf(value)
if valueType.Kind() == reflect.Slice {
bytes, err := json.Marshal(value)
if err != nil {
return nil, err
}
jsonStr := string(bytes)
kvPair.Value.JSON = &jsonStr
}
kvPairs = append(kvPairs, kvPair)
}
return kvPairs, nil
}
func parseRequestAttributes(attrs []*KeyValueInput) []*registrytypes.QueryListRecordsRequest_KeyValueInput {
kvPairs := []*registrytypes.QueryListRecordsRequest_KeyValueInput{} kvPairs := []*registrytypes.QueryListRecordsRequest_KeyValueInput{}
for _, value := range attrs { for _, value := range attrs {
parsedValue := toRPCValue(value.Value)
kvPair := &registrytypes.QueryListRecordsRequest_KeyValueInput{ kvPair := &registrytypes.QueryListRecordsRequest_KeyValueInput{
Key: value.Key, Key: value.Key,
Value: parsedValue, Value: &registrytypes.QueryListRecordsRequest_ValueInput{},
} }
if value.Value.String != nil {
kvPair.Value.String_ = *value.Value.String
kvPair.Value.Type = "string"
}
if value.Value.Int != nil {
kvPair.Value.Int = int64(*value.Value.Int)
kvPair.Value.Type = "int"
}
if value.Value.Float != nil {
kvPair.Value.Float = *value.Value.Float
kvPair.Value.Type = "float"
}
if value.Value.Boolean != nil {
kvPair.Value.Boolean = *value.Value.Boolean
kvPair.Value.Type = "boolean"
}
if value.Value.Reference != nil {
reference := &registrytypes.QueryListRecordsRequest_ReferenceInput{
Id: value.Value.Reference.ID,
}
kvPair.Value.Reference = reference
kvPair.Value.Type = "reference"
}
// TODO: Handle arrays.
kvPairs = append(kvPairs, kvPair) kvPairs = append(kvPairs, kvPair)
} }
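
A minimal, self-contained sketch of the reference-detection behaviour in mapToKeyValuePairs above, using simplified stand-in structs rather than the generated GQL types: a map attribute with a single "/" key is treated as a link to another record, while any other map is flattened to a JSON string. The attribute names and record ID below are hypothetical.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the generated GQL types (Reference, Value, KeyValue).
type Reference struct{ ID string }

type Value struct {
	String    *string
	JSON      *string
	Reference *Reference
}

type KeyValue struct {
	Key   string
	Value *Value
}

// toKeyValue mirrors the map-handling branch of mapToKeyValuePairs:
// {"/": "<record id>"} becomes a Reference, any other map becomes JSON.
func toKeyValue(key string, value interface{}) (*KeyValue, error) {
	kv := &KeyValue{Key: key, Value: &Value{}}
	switch val := value.(type) {
	case string:
		kv.Value.String = &val
	case map[string]interface{}:
		if ref, ok := val["/"].(string); ok && len(val) == 1 {
			kv.Value.Reference = &Reference{ID: ref}
			return kv, nil
		}
		bytes, err := json.Marshal(val)
		if err != nil {
			return nil, err
		}
		s := string(bytes)
		kv.Value.JSON = &s
	}
	return kv, nil
}

func main() {
	attrs := map[string]interface{}{
		"url":            "https://example.org",
		"repo_reference": map[string]interface{}{"/": "bafy...recordid"}, // hypothetical record ID
		"x500":           map[string]interface{}{"common_name": "example"},
	}
	for k, v := range attrs {
		kv, _ := toKeyValue(k, v)
		fmt.Printf("%s -> %+v\n", k, kv.Value)
	}
}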

View File

@ -1,5 +1,12 @@
# Reference to another record. # Reference to another record.
scalar Link type Reference {
id: String! # ID of linked record.
}
# Reference to another record.
input ReferenceInput {
id: String!
}
# Bonds contain funds that are used to pay rent on record registration and renewal. # Bonds contain funds that are used to pay rent on record registration and renewal.
type Bond { type Bond {
@ -30,71 +37,44 @@ type Account {
balance: [Coin!] # Current balance for each coin type. balance: [Coin!] # Current balance for each coin type.
} }
# Value describes a DAG-JSON compatible value. # Value of a given type.
union Value = type Value {
BooleanValue null: Boolean
| IntValue
| FloatValue
| StringValue
| BytesValue
| LinkValue
| ArrayValue
| MapValue
type BooleanValue {
value: Boolean!
}
type IntValue {
value: Int!
}
type FloatValue {
value: Float!
}
type StringValue {
value: String!
}
type BytesValue {
value: String!
}
type ArrayValue {
value: [Value]!
}
type LinkValue {
value: Link!
}
type MapValue {
value: [Attribute!]!
}
# Key/value pair.
type Attribute {
key: String!
value: Value
}
# Value of a given type used as input to queries.
# Note: GQL doesn't allow union input types.
input ValueInput {
int: Int int: Int
float: Float float: Float
string: String string: String
boolean: Boolean boolean: Boolean
link: Link json: String
array: [ValueInput]
map: [KeyValueInput!] reference: Reference
values: [Value]
}
# Value of a given type used as input to queries.
input ValueInput {
null: Boolean
int: Int
float: Float
string: String
boolean: Boolean
reference: ReferenceInput
values: [ValueInput]
}
# Key/value pair.
type KeyValue {
key: String!
value: Value!
} }
# Key/value pair for inputs. # Key/value pair for inputs.
input KeyValueInput { input KeyValueInput {
key: String! key: String!
value: ValueInput value: ValueInput!
} }
# Status information about a node (https://docs.tendermint.com/master/rpc/#/Info/status). # Status information about a node (https://docs.tendermint.com/master/rpc/#/Info/status).
@ -175,7 +155,7 @@ type Record {
createTime: String! # Record create time. createTime: String! # Record create time.
expiryTime: String! # Record expiry time. expiryTime: String! # Record expiry time.
owners: [String!] # Addresses of record owners. owners: [String!] # Addresses of record owners.
attributes: [Attribute!] # Record attributes. attributes: [KeyValue] # Record attributes.
references: [Record] # Record references. references: [Record] # Record references.
} }
@ -215,7 +195,7 @@ type Query {
getBondsByIds(ids: [String!]): [Bond] getBondsByIds(ids: [String!]): [Bond]
# Query bonds. # Query bonds.
queryBonds(attributes: [KeyValueInput!]): [Bond] queryBonds(attributes: [KeyValueInput]): [Bond]
# Query bonds by owner. # Query bonds by owner.
queryBondsByOwner(ownerAddresses: [String!]): [OwnerBonds] queryBondsByOwner(ownerAddresses: [String!]): [OwnerBonds]
@ -230,7 +210,7 @@ type Query {
# Query records. # Query records.
queryRecords( queryRecords(
# Multiple attribute conditions are in a logical AND. # Multiple attribute conditions are in a logical AND.
attributes: [KeyValueInput!] attributes: [KeyValueInput]
# Whether to query all records, not just named ones (false by default). # Whether to query all records, not just named ones (false by default).
all: Boolean all: Boolean

187
init.sh
View File

@ -5,137 +5,110 @@ CHAINID="laconic_9000-1"
MONIKER="localtestnet" MONIKER="localtestnet"
KEYRING="test" KEYRING="test"
KEYALGO="eth_secp256k1" KEYALGO="eth_secp256k1"
LOGLEVEL="${LOGLEVEL:-info}" LOGLEVEL="info"
# trace evm # trace evm
TRACE="--trace" TRACE="--trace"
# TRACE="" # TRACE=""
if [ "$1" == "clean" ] || [ ! -d "$HOME/.laconicd/data/blockstore.db" ]; then # validate dependencies are installed
# validate dependencies are installed command -v jq > /dev/null 2>&1 || { echo >&2 "jq not installed. More info: https://stedolan.github.io/jq/download/"; exit 1; }
command -v jq > /dev/null 2>&1 || {
echo >&2 "jq not installed. More info: https://stedolan.github.io/jq/download/"
exit 1
}
# remove existing daemon and client # remove existing daemon and client
rm -rf $HOME/.laconicd/* rm -rf ~/.laconic*
rm -rf $HOME/.laconic/*
if [ -n "`which make`" ]; then make install
make install
fi
laconicd config keyring-backend $KEYRING laconicd config keyring-backend $KEYRING
laconicd config chain-id $CHAINID laconicd config chain-id $CHAINID
# if $KEY exists it should be deleted # if $KEY exists it should be deleted
laconicd keys add $KEY --keyring-backend $KEYRING --algo $KEYALGO laconicd keys add $KEY --keyring-backend $KEYRING --algo $KEYALGO
# Set moniker and chain-id for Ethermint (Moniker can be anything, chain-id must be an integer) # Set moniker and chain-id for Ethermint (Moniker can be anything, chain-id must be an integer)
laconicd init $MONIKER --chain-id $CHAINID laconicd init $MONIKER --chain-id $CHAINID
update_genesis() { # Change parameter token denominations to aphoton
jq "$1" $HOME/.laconicd/config/genesis.json > $HOME/.laconicd/config/tmp_genesis.json && cat $HOME/.laconicd/config/genesis.json | jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json cat $HOME/.laconicd/config/genesis.json | jq '.app_state["crisis"]["constant_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
} cat $HOME/.laconicd/config/genesis.json | jq '.app_state["gov"]["deposit_params"]["min_deposit"][0]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["mint"]["params"]["mint_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
# Custom modules
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commit_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveal_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_minimum_bid"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
# Change parameter token denominations to aphoton if [[ "$TEST_REGISTRY_EXPIRY" == "true" ]]; then
update_genesis '.app_state["staking"]["params"]["bond_denom"]="aphoton"' echo "Setting timers for expiry tests."
update_genesis '.app_state["crisis"]["constant_fee"]["denom"]="aphoton"'
update_genesis '.app_state["gov"]["deposit_params"]["min_deposit"][0]["denom"]="aphoton"'
update_genesis '.app_state["mint"]["params"]["mint_denom"]="aphoton"'
# Custom modules
update_genesis '.app_state["registry"]["params"]["record_rent"]["denom"]="aphoton"'
update_genesis '.app_state["registry"]["params"]["authority_rent"]["denom"]="aphoton"'
update_genesis '.app_state["registry"]["params"]["authority_auction_commit_fee"]["denom"]="aphoton"'
update_genesis '.app_state["registry"]["params"]["authority_auction_reveal_fee"]["denom"]="aphoton"'
update_genesis '.app_state["registry"]["params"]["authority_auction_minimum_bid"]["denom"]="aphoton"'
if [[ "$TEST_REGISTRY_EXPIRY" == "true" ]]; then cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
echo "Setting timers for expiry tests." cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
fi
update_genesis '.app_state["registry"]["params"]["record_rent_duration"]="60s"' if [[ "$TEST_AUCTION_ENABLED" == "true" ]]; then
update_genesis '.app_state["registry"]["params"]["authority_grace_period"]="60s"' echo "Enabling auction and setting timers."
update_genesis '.app_state["registry"]["params"]["authority_rent_duration"]="60s"'
fi
if [[ "$TEST_AUCTION_ENABLED" == "true" ]]; then cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_enabled"]=true' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
echo "Enabling auction and setting timers." cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="300s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commits_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveals_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
fi
update_genesis '.app_state["registry"]["params"]["authority_auction_enabled"]=true' # increase block time (?)
update_genesis '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["time_iota_ms"]="1000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
update_genesis '.app_state["registry"]["params"]["authority_grace_period"]="300s"'
update_genesis '.app_state["registry"]["params"]["authority_auction_commits_duration"]="60s"'
update_genesis '.app_state["registry"]["params"]["authority_auction_reveals_duration"]="60s"'
fi
# increase block time (?) # Set gas limit in genesis
update_genesis '.consensus_params["block"]["time_iota_ms"]="1000"' cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["max_gas"]="10000000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
# Set gas limit in genesis # disable produce empty block
update_genesis '.consensus_params["block"]["max_gas"]="10000000"' if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
# disable produce empty block
if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
else
sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
fi
if [[ "$1" == "pending" ]]; then
alias sed-i="sed -i"
if [[ "$OSTYPE" == "darwin"* ]]; then
alias sed-i="sed -i ''"
fi
sed-i \
-e 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' \
-e 's/timeout_propose = "3s"/timeout_propose = "30s"/g' \
-e 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' \
-e 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' \
-e 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' \
-e 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' \
-e 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' \
-e 's/timeout_commit = "5s"/timeout_commit = "150s"/g' \
-e 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' \
$HOME/.laconicd/config/config.toml
fi
# Enable telemetry (prometheus metrics: http://localhost:1317/metrics?format=prometheus)
if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/enabled = false/enabled = true/g' $HOME/.laconicd/config/app.toml
sed -i '' 's/prometheus-retention-time = 0/prometheus-retention-time = 60/g' $HOME/.laconicd/config/app.toml
sed -i '' 's/prometheus = false/prometheus = true/g' $HOME/.laconicd/config/config.toml
else else
sed -i 's/enabled = false/enabled = true/g' $HOME/.laconicd/config/app.toml sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml
sed -i 's/prometheus-retention-time = 0/prometheus-retention-time = 60/g' $HOME/.laconicd/config/app.toml fi
sed -i 's/prometheus = false/prometheus = true/g' $HOME/.laconicd/config/config.toml
if [[ $1 == "pending" ]]; then
if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml
sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml
else
sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml
sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml
fi fi
fi
# Allocate genesis accounts (cosmos formatted addresses) # Allocate genesis accounts (cosmos formatted addresses)
laconicd add-genesis-account $KEY 100000000000000000000000000aphoton --keyring-backend $KEYRING laconicd add-genesis-account $KEY 100000000000000000000000000aphoton --keyring-backend $KEYRING
# Sign genesis transaction # Sign genesis transaction
laconicd gentx $KEY 1000000000000000000000aphoton --keyring-backend $KEYRING --chain-id $CHAINID laconicd gentx $KEY 1000000000000000000000aphoton --keyring-backend $KEYRING --chain-id $CHAINID
# Collect genesis tx # Collect genesis tx
laconicd collect-gentxs laconicd collect-gentxs
# Run this to ensure everything worked and that the genesis file is setup correctly # Run this to ensure everything worked and that the genesis file is setup correctly
laconicd validate-genesis laconicd validate-genesis
if [[ "$1" == "pending" ]]; then if [[ $1 == "pending" ]]; then
echo "pending mode is on, please wait for the first block committed." echo "pending mode is on, please wait for the first block committed."
fi
else
echo "Using existing database at $HOME/.laconicd. To replace, run '`basename $0` clean'"
fi fi
# Start the node (remove the --pruning=nothing flag if historical queries are not needed) # Start the node (remove the --pruning=nothing flag if historical queries are not needed)
laconicd start \ laconicd start --pruning=nothing --evm.tracer=json $TRACE --log_level $LOGLEVEL --minimum-gas-prices=0.0001aphoton --json-rpc.api eth,txpool,personal,net,debug,web3,miner --api.enable --gql-server --gql-playground
--pruning=nothing \
--evm.tracer=json $TRACE \
--log_level $LOGLEVEL \
--minimum-gas-prices=0.0001aphoton \
--json-rpc.api eth,txpool,personal,net,debug,web3,miner \
--api.enable \
--gql-server --gql-playground

View File

@ -0,0 +1,113 @@
syntax = "proto3";
package vulcanize.registry.v1beta1;
import "gogoproto/gogo.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types";
message HashReference {
string ref = 1 [(gogoproto.jsontag) = "/"];
}
message ServiceProviderRecord {
string bond_id = 1 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
string laconic_id = 2 [(gogoproto.moretags) = "json:\"laconicId\" yaml:\"laconicId\""];
X500 x500 = 3 [(gogoproto.moretags) = "json:\"x500\" yaml:\"x500\""];
string type = 4 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
message X500 {
string common_name = 1 [(gogoproto.moretags) = "json:\"commonName\" yaml:\"commonName\""];
string organization_unit = 2 [(gogoproto.moretags) = "json:\"organizationUnit\" yaml:\"organizationUnit\""];
string organization_name = 3 [(gogoproto.moretags) = "json:\"organizationName\" yaml:\"organizationName\""];
string locality_name = 4 [(gogoproto.moretags) = "json:\"localityName\" yaml:\"localityName\""];
string state_name = 5 [(gogoproto.moretags) = "json:\"stateName\" yaml:\"stateName\""];
string country = 6 [(gogoproto.moretags) = "json:\"country\" yaml:\"country\""];
}
}
message WebsiteRegistrationRecord {
string url = 1 [(gogoproto.moretags) = "json:\"url\" yaml:\"url\""];
HashReference repo_reference = 2
[(gogoproto.moretags) = "json:\"repoReference\" yaml:\"repoReference\""];
HashReference build_artifact_ref = 3 [(gogoproto.moretags) = "json:\"buildArtifactRef\" yaml:\"buildArtifactRef\""];
HashReference tls_cert_ref = 4 [(gogoproto.moretags) = "json:\"tlsCertRef\" yaml:\"tlsCertRef\""];
string type = 5 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
}
message GitRepository {
string name = 1 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string repo_reference = 2 [(gogoproto.moretags) = "json:\"repo_reference\" yaml:\"repo_reference\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string type = 5 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
}
message Binary {
HashReference hash_reference = 1;
string targeted_arch = 2;
string runtime_version = 3;
HashReference repo_reference = 4;
string version = 5;
string type = 6;
}
message DockerImage {
string image_id = 1;
HashReference binary_reference = 2;
HashReference repo_reference = 3;
string version = 4;
string type = 5;
}
message WatcherRegistrationRecord {
WatcherMetadata metadata = 1;
HashReference repo_reference = 2;
WASMBinary wasm = 3;
string version = 4;
string type = 5;
message WatcherMetadata {
string version = 1;
HashReference chain_reference = 2;
}
message WASMBinary {
HashReference hash_reference = 1;
WASMBinaryMetadata metadata = 2;
}
message WASMBinaryMetadata {
string compiler_version = 1;
string execution_engine_version = 2;
}
}
message ResponderContract {
HashReference service_provider_ref = 1;
HashReference auction_ref = 2;
HashReference watcher_ref = 3;
string version = 4;
string type = 5;
}
message JSPackage {
HashReference repo_reference = 1;
HashReference js_package_ref = 2;
string version = 3;
string type = 4;
string name = 5;
}
message ChainRegistrationRecord {
string name = 1;
repeated string ipld_types = 2;
string type = 3;
string version = 4;
string chain_id = 5;
string network_id = 6;
HashReference genesis_hash = 7;
}
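
A minimal sketch (simplified stand-in structs, not the generated code) of what the (gogoproto.jsontag) = "/" option on HashReference above buys: a reference field marshals as the IPLD-style link object {"/": "<hash>"}, which is exactly the shape the GQL resolver's reference detection looks for. The hash value is hypothetical.

package main

import (
	"encoding/json"
	"fmt"
)

// HashReference stands in for the generated type; the struct tag reproduces
// the effect of (gogoproto.jsontag) = "/".
type HashReference struct {
	Ref string `json:"/"`
}

type WebsiteRegistrationRecord struct {
	URL           string        `json:"url"`
	RepoReference HashReference `json:"repoReference"`
}

func main() {
	rec := WebsiteRegistrationRecord{
		URL:           "https://example.org",
		RepoReference: HashReference{Ref: "bafy...somehash"}, // hypothetical hash
	}
	out, _ := json.Marshal(rec)
	// Prints: {"url":"https://example.org","repoReference":{"/":"bafy...somehash"}}
	fmt.Println(string(out))
}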

View File

@ -67,26 +67,17 @@ message QueryParamsResponse {
// QueryListRecordsRequest is request type for registry records list // QueryListRecordsRequest is request type for registry records list
message QueryListRecordsRequest { message QueryListRecordsRequest {
message LinkInput { message ReferenceInput {
string id = 1; string id = 1;
} }
message ArrayInput {
repeated ValueInput values = 1;
}
message MapInput {
map<string, ValueInput> values = 1;
}
message ValueInput { message ValueInput {
// Type of record attribute value string type = 1;
oneof value { string string = 2;
string string = 1; int64 int = 3;
int64 int = 2; double float = 4;
double float = 3; bool boolean = 5;
bool boolean = 4; ReferenceInput reference = 6;
string link = 5; repeated ValueInput values = 7;
ArrayInput array = 6;
MapInput map = 7;
}
} }
message KeyValueInput { message KeyValueInput {
string key = 1; string key = 1;
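
A rough sketch of how the new flat ValueInput (a type tag plus exactly one populated value field) is intended to be filled from a query attribute, mirroring parseRequestAttributes in the resolver; the structs are simplified stand-ins for the generated QueryListRecordsRequest_* types and the example values are hypothetical.

package main

import "fmt"

// Simplified stand-ins for the generated QueryListRecordsRequest_* types.
type ReferenceInput struct{ ID string }

type ValueInput struct {
	Type      string
	String    string
	Int       int64
	Boolean   bool
	Reference *ReferenceInput
}

type KeyValueInput struct {
	Key   string
	Value ValueInput
}

// buildFilter mirrors parseRequestAttributes for the common cases:
// exactly one value field is set and Type records which one it is.
func buildFilter(key string, value interface{}) KeyValueInput {
	kv := KeyValueInput{Key: key}
	switch v := value.(type) {
	case string:
		kv.Value = ValueInput{Type: "string", String: v}
	case int64:
		kv.Value = ValueInput{Type: "int", Int: v}
	case bool:
		kv.Value = ValueInput{Type: "boolean", Boolean: v}
	case ReferenceInput:
		kv.Value = ValueInput{Type: "reference", Reference: &v}
	}
	return kv
}

func main() {
	fmt.Printf("%+v\n", buildFilter("type", "WebsiteRegistrationRecord"))
	fmt.Printf("%+v\n", buildFilter("repo_reference", ReferenceInput{ID: "bafy...recordid"}))
}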

View File

@ -5,6 +5,7 @@ import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto"; import "google/protobuf/timestamp.proto";
import "gogoproto/gogo.proto"; import "gogoproto/gogo.proto";
import "cosmos/base/v1beta1/coin.proto"; import "cosmos/base/v1beta1/coin.proto";
import "google/protobuf/any.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types"; option go_package = "github.com/cerc-io/laconicd/x/registry/types";
@ -55,7 +56,7 @@ message Params {
]; ];
} }
// Record defines a registry record // Record defines a registry module record
message Record { message Record {
string id = 1 [(gogoproto.moretags) = "json:\"id\" yaml:\"id\""]; string id = 1 [(gogoproto.moretags) = "json:\"id\" yaml:\"id\""];
string bond_id = 2 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""]; string bond_id = 2 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
@ -63,12 +64,12 @@ message Record {
string expiry_time = 4 [(gogoproto.moretags) = "json:\"expiryTime\" yaml:\"expiryTime\""]; string expiry_time = 4 [(gogoproto.moretags) = "json:\"expiryTime\" yaml:\"expiryTime\""];
bool deleted = 5; bool deleted = 5;
repeated string owners = 6 [(gogoproto.moretags) = "json:\"owners\" yaml:\"owners\""]; repeated string owners = 6 [(gogoproto.moretags) = "json:\"owners\" yaml:\"owners\""];
bytes attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""]; google.protobuf.Any attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""];
repeated string names = 8 [(gogoproto.moretags) = "json:\"names\" yaml:\"names\""]; repeated string names = 8 [(gogoproto.moretags) = "json:\"names\" yaml:\"names\""];
string type = 9 [(gogoproto.moretags) = "json:\"types\" yaml:\"types\""]; string type = 9 [(gogoproto.moretags) = "json:\"types\" yaml:\"types\""];
} }
// AuthorityEntry defines a registry authority // AuthorityEntry defines a registry module authority entry
message AuthorityEntry { message AuthorityEntry {
string name = 1; string name = 1;
NameAuthority entry = 2; NameAuthority entry = 2;
@ -98,7 +99,7 @@ message NameEntry {
NameRecord entry = 2; NameRecord entry = 2;
} }
// NameRecord defines a versioned name record // NameRecord
message NameRecord { message NameRecord {
NameRecordEntry latest = 1; NameRecordEntry latest = 1;
repeated NameRecordEntry history = 2; repeated NameRecordEntry history = 2;
@ -130,4 +131,4 @@ message BlockChangeSet {
message AuctionBidInfo { message AuctionBidInfo {
string auction_id = 1 [(gogoproto.moretags) = "json:\"auctionID\" yaml:\"auctionID\""]; string auction_id = 1 [(gogoproto.moretags) = "json:\"auctionID\" yaml:\"auctionID\""];
string bidder_address = 2 [(gogoproto.moretags) = "json:\"bidderAddress\" yaml:\"bidderAddress\""]; string bidder_address = 2 [(gogoproto.moretags) = "json:\"bidderAddress\" yaml:\"bidderAddress\""];
} }

View File

@ -7,13 +7,13 @@ import "vulcanize/registry/v1beta1/registry.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types"; option go_package = "github.com/cerc-io/laconicd/x/registry/types";
// Msg is a service which exposes the registry functionality // Msg
service Msg { service Msg {
// SetRecord records a new record with given payload and bond id // SetRecord will record a new record with the given payload and bond id
rpc SetRecord(MsgSetRecord) returns (MsgSetRecordResponse) { rpc SetRecord(MsgSetRecord) returns (MsgSetRecordResponse) {
option (google.api.http).post = "/vulcanize/registry/v1beta1/set_record"; option (google.api.http).post = "/vulcanize/registry/v1beta1/set_record";
} }
// Renew Record renews an expired record // Renew Record will renew the expired record
rpc RenewRecord(MsgRenewRecord) returns (MsgRenewRecordResponse) { rpc RenewRecord(MsgRenewRecord) returns (MsgRenewRecordResponse) {
option (google.api.http).post = "/vulcanize/registry/v1beta1/renew_record"; option (google.api.http).post = "/vulcanize/registry/v1beta1/renew_record";
} }
@ -66,10 +66,8 @@ message MsgSetRecordResponse {
// Payload // Payload
message Payload { message Payload {
Record record = 1; Record record = 1;
repeated Signature signatures = 2 [ repeated Signature signatures = 2
(gogoproto.nullable) = false, [(gogoproto.nullable) = false, (gogoproto.moretags) = "json:\"signatures\" yaml:\"signatures\""];
(gogoproto.moretags) = "json:\"signatures\" yaml:\"signatures\""
];
} }
// MsgSetName // MsgSetName
@ -93,7 +91,7 @@ message MsgReserveAuthority {
// MsgReserveNameResponse // MsgReserveNameResponse
message MsgReserveAuthorityResponse {} message MsgReserveAuthorityResponse {}
// MsgSetAuthorityBond // MsgSetAuthorityBond is SDK message for SetAuthorityBond
message MsgSetAuthorityBond { message MsgSetAuthorityBond {
string name = 1; string name = 1;
string bond_id = 2 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""]; string bond_id = 2 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
@ -103,7 +101,7 @@ message MsgSetAuthorityBond {
// MsgSetAuthorityBondResponse // MsgSetAuthorityBondResponse
message MsgSetAuthorityBondResponse {} message MsgSetAuthorityBondResponse {}
// MsgDeleteNameAuthority // MsgDeleteNameAuthority is SDK message for DeleteNameAuthority
message MsgDeleteNameAuthority { message MsgDeleteNameAuthority {
string crn = 1; string crn = 1;
string signer = 2; string signer = 2;
@ -112,7 +110,7 @@ message MsgDeleteNameAuthority {
// MsgDeleteNameAuthorityResponse // MsgDeleteNameAuthorityResponse
message MsgDeleteNameAuthorityResponse {} message MsgDeleteNameAuthorityResponse {}
// MsgRenewRecord // MsgRenewRecord is the SDK message for renewing a record
message MsgRenewRecord { message MsgRenewRecord {
string record_id = 1 [(gogoproto.moretags) = "json:\"recordId\" yaml:\"recordId\""]; string record_id = 1 [(gogoproto.moretags) = "json:\"recordId\" yaml:\"recordId\""];
string signer = 2; string signer = 2;
@ -131,30 +129,30 @@ message MsgAssociateBond {
// MsgAssociateBondResponse // MsgAssociateBondResponse
message MsgAssociateBondResponse {} message MsgAssociateBondResponse {}
// MsgDissociateBond // MsgDissociateBond is SDK message for Msg/DissociateBond
message MsgDissociateBond { message MsgDissociateBond {
string record_id = 1 [(gogoproto.moretags) = "json:\"recordId\" yaml:\"recordId\""]; string record_id = 1 [(gogoproto.moretags) = "json:\"recordId\" yaml:\"recordId\""];
string signer = 2; string signer = 2;
} }
// MsgDissociateBondResponse // MsgDissociateBondResponse is response type for MsgDissociateBond
message MsgDissociateBondResponse {} message MsgDissociateBondResponse {}
// MsgDissociateRecords // MsgDissociateRecords is SDK message for Msg/DissociateRecords
message MsgDissociateRecords { message MsgDissociateRecords {
string bond_id = 1 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""]; string bond_id = 1 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
string signer = 2; string signer = 2;
} }
// MsgDissociateRecordsResponse // MsgDissociateRecordsResponse is response type for MsgDissociateRecords
message MsgDissociateRecordsResponse {} message MsgDissociateRecordsResponse {}
// MsgReAssociateRecords // MsgReAssociateRecords is SDK message for Msg/ReAssociateRecords
message MsgReAssociateRecords { message MsgReAssociateRecords {
string new_bond_id = 1 [(gogoproto.moretags) = "json:\"newBondId\" yaml:\"newBondId\""]; string new_bond_id = 1 [(gogoproto.moretags) = "json:\"newBondId\" yaml:\"newBondId\""];
string old_bond_id = 2 [(gogoproto.moretags) = "json:\"oldBondId\" yaml:\"oldBondId\""]; string old_bond_id = 2 [(gogoproto.moretags) = "json:\"oldBondId\" yaml:\"oldBondId\""];
string signer = 3; string signer = 3;
} }
// MsgReAssociateRecordsResponse // MsgReAssociateRecordsResponse is response type for MsgReAssociateRecords
message MsgReAssociateRecordsResponse {} message MsgReAssociateRecordsResponse {}

View File

@ -94,10 +94,11 @@ func (s *websocketsServer) Start() {
go func() { go func() {
var err error var err error
/* #nosec G114 -- http functions have no support for timeouts */
if s.certFile == "" || s.keyFile == "" { if s.certFile == "" || s.keyFile == "" {
err = http.ListenAndServe(s.wsAddr, ws) /* #nosec G114 -- http functions have no support for timeouts */ err = http.ListenAndServe(s.wsAddr, ws)
} else { } else {
err = http.ListenAndServeTLS(s.wsAddr, s.certFile, s.keyFile, ws) // #nosec G114 err = http.ListenAndServeTLS(s.wsAddr, s.certFile, s.keyFile, ws)
} }
if err != nil { if err != nil {

View File

@ -1,48 +0,0 @@
#!/bin/bash
##
## This script generates a new account keypair, transfers funds to it, and creates
## a bond with those funds. The bond amount can optionally be specified, else a
## default value estimated to be sufficient for about 5000 records.
##
## The `laconic` CLI and a valid config file for it are required. The default location
## for the config file is `~/.laconic/config.yml`, but this can be controlled with
## the environment variable LACONIC_CONFIG. A `userKey` for a source account with
## sufficient funds available must be present in this file for the funds transfer
## to succeed.
##
## Example:
##
## $ scripts/create-and-fund-account.sh 1000000000
## {
## "name": "68334d7175fd4f86befa4902657e5270",
## "type": "local",
## "address": "ethm15r5x94km0swq55aszwd7hnr9wksq7wmr38xes7",
## "pubkey": "AuKqlSldJJXj4gYMFt2HeX9DJ3aUosYA7n6zBz9Tg7/i",
## "mnemonic": "umbrella bean special unaware accident giant distance mix ghost feel possible cost road grant endless man maple derive rebuild learn mask water attract resist",
## "bond": "3d3a73f09115d289d330781455e6eac217305dc4a20e19bde808011fe3775a93",
## "balance": 1000000000,
## "privkey": "480880fde7aff1461da584b436cb3a84692413c84623fda78e127bb4e704ce76"
## }
##
AVG_RECORD_PHOTON=1000000
NUM_RECORDS=5000
BOND_OVERHEAD=200000
KEYNAME=`uuidgen | tr -d '-'`
KEYRING_DIR=`mktemp -d`
KEYRING=test
LACONIC_CONFIG=${LACONIC_CONFIG:-$HOME/.laconic/config.yml}
BOND_AMOUNT=${1:-$((AVG_RECORD_PHOTON * NUM_RECORDS))}
ACCOUNT_JSON=$(laconicd keys add $KEYNAME --keyring-backend $KEYRING --algo eth_secp256k1 --keyring-dir $KEYRING_DIR --output json)
PRIVATE_KEY=$(yes | laconicd keys export $KEYNAME --keyring-backend $KEYRING --keyring-dir $KEYRING_DIR --unarmored-hex --unsafe)
PUB_KEY=$(echo $ACCOUNT_JSON | jq -r ".pubkey | fromjson | .key")
laconicd keys delete $KEYNAME --keyring-backend $KEYRING --keyring-dir $KEYRING_DIR -y 2> /dev/null
rm -rf $KEYRING_DIR
laconic -c $LACONIC_CONFIG cns tokens send --address $(echo $ACCOUNT_JSON | jq -r '.address') --type aphoton --quantity $((BOND_AMOUNT + BOND_OVERHEAD)) > /dev/null
BOND_ID=$(laconic -c $LACONIC_CONFIG cns bond create --user-key $PRIVATE_KEY --type aphoton --quantity $BOND_AMOUNT | jq -r '.bondId')
echo $ACCOUNT_JSON | jq ".bond = \"$BOND_ID\"" | jq ".balance = $BOND_AMOUNT" | jq ".privkey = \"$PRIVATE_KEY\"" | jq ".pubkey = \"$PUB_KEY\""

View File

@ -146,7 +146,7 @@ echo "done sleeping"
set +e set +e
if [[ -z $TEST || $TEST == "rpc" || $TEST == "pending" ]]; then if [[ -z $TEST || $TEST == "rpc" || $TEST == "pending" ]]; then
time_out=900s time_out=300s
if [[ $TEST == "pending" ]]; then if [[ $TEST == "pending" ]]; then
time_out=60m0s time_out=60m0s
fi fi

View File

@ -0,0 +1,52 @@
# Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT=16-bullseye
FROM node:${VARIANT}
ARG USERNAME=node
ARG NPM_GLOBAL=/usr/local/share/npm-global
# Add NPM global to PATH.
ENV PATH=${NPM_GLOBAL}/bin:${PATH}
RUN \
# Configure global npm install location, use group to adapt to UID/GID changes
if ! cat /etc/group | grep -e "^npm:" > /dev/null 2>&1; then groupadd -r npm; fi \
&& usermod -a -G npm ${USERNAME} \
&& umask 0002 \
&& mkdir -p ${NPM_GLOBAL} \
&& touch /usr/local/etc/npmrc \
&& chown ${USERNAME}:npm ${NPM_GLOBAL} /usr/local/etc/npmrc \
&& chmod g+s ${NPM_GLOBAL} \
&& npm config -g set prefix ${NPM_GLOBAL} \
&& su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
# Install eslint
&& su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
&& npm cache clean --force > /dev/null 2>&1
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# [Optional] Uncomment if you want to install an additional version of node using nvm
# ARG EXTRA_NODE_VERSION=10
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
# [Optional] Uncomment if you want to install more global node modules
# RUN su node -c "npm install -g <your-package-list-here>"
WORKDIR /
COPY entrypoint.sh .
ENTRYPOINT ["/entrypoint.sh"]
# Placeholder CMD: generally this will be overridden at run time, e.g.:
# docker run -it -v /home/builder/cerc/laconic-sdk:/workspace cerc/builder-js sh -c 'cd /workspace && yarn && yarn build'
CMD node --version
# Temp hack, clone the laconic-sdk repo here
WORKDIR /app
RUN \
git clone https://github.com/cerc-io/laconic-sdk.git \
&& cd laconic-sdk \
&& yarn install
WORKDIR /app/laconic-sdk

View File

@ -1,2 +1,3 @@
#!/usr/bin/env bash #!/usr/bin/env bash
docker build -t cerc/laconicd:local ../.. --progress=plain docker build -t cerc-io/laconicd:local-test ../../

View File

@ -0,0 +1,3 @@
#!/usr/bin/env bash
docker build -t cerc-io/laconic-sdk-tester:local-test -f Dockerfile-sdk .

View File

@ -0,0 +1,31 @@
services:
laconicd:
restart: unless-stopped
image: cerc-io/laconicd:local-test
environment:
- TEST_AUCTION_ENABLED=true
command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
volumes:
- ../../init.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh
healthcheck:
test: ["CMD", "curl", "-v", "http://127.0.0.1:6060"]
interval: 1s
timeout: 5s
retries: 30
ports:
- "6060"
- "26657"
- "26656"
- "9473"
- "8545"
- "8546"
- "9090"
- "9091"
- "1317"
sdk-test-runner:
image: cerc-io/laconic-sdk-tester:local-test
depends_on:
laconicd:
condition: service_healthy
command: tail -F /dev/null

View File

@ -0,0 +1,31 @@
services:
laconicd:
restart: unless-stopped
image: cerc-io/laconicd:local-test
environment:
- TEST_REGISTRY_EXPIRY=true
command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
volumes:
- ../../init.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh
healthcheck:
test: ["CMD", "curl", "-v", "http://127.0.0.1:6060"]
interval: 1s
timeout: 5s
retries: 30
ports:
- "6060"
- "26657"
- "26656"
- "9473"
- "8545"
- "8546"
- "9090"
- "9091"
- "1317"
sdk-test-runner:
image: cerc-io/laconic-sdk-tester:local-test
depends_on:
laconicd:
condition: service_healthy
command: tail -F /dev/null

View File

@ -1,14 +1,10 @@
services: services:
laconicd: laconicd:
restart: unless-stopped restart: unless-stopped
image: cerc/laconicd:local image: cerc-io/laconicd:local-test
entrypoint: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"] command: ["sh", "/docker-entrypoint-scripts.d/create-fixturenet.sh"]
environment:
- TEST_AUCTION_ENABLED
- TEST_REGISTRY_EXPIRY
- LOGLEVEL
volumes: volumes:
- ../../init.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh - ../../init.sh:/docker-entrypoint-scripts.d/create-fixturenet.sh
healthcheck: healthcheck:
test: ["CMD", "curl", "-v", "http://127.0.0.1:6060"] test: ["CMD", "curl", "-v", "http://127.0.0.1:6060"]
interval: 1s interval: 1s
@ -26,7 +22,7 @@ services:
- "1317" - "1317"
sdk-test-runner: sdk-test-runner:
image: cerc/laconic-sdk-tester:local image: cerc-io/laconic-sdk-tester:local-test
depends_on: depends_on:
laconicd: laconicd:
condition: service_healthy condition: service_healthy

3
tests/sdk_tests/entrypoint.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
exec "$@"

View File

@ -0,0 +1,16 @@
#!/usr/bin/env bash
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
# Get the key from laconicd
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
# Set parameters for the test suite
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
# Run tests
docker network inspect sdk_tests_default
sleep 30s
docker logs sdk_tests-laconicd-1
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:auctions"

View File

@ -0,0 +1,16 @@
#!/usr/bin/env bash
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x
fi
# Get the key from laconicd
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
# Set parameters for the test suite
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
# Run tests
docker network inspect sdk_tests_default
sleep 30s
docker logs sdk_tests-laconicd-1
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:nameservice-expiry"

View File

@ -1,28 +1,18 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Forwards all args to yarn on the sdk-test-runner container
if [ -n "$CERC_SCRIPT_DEBUG" ]; then if [ -n "$CERC_SCRIPT_DEBUG" ]; then
set -x set -x
fi fi
yarn_args=("--inspect-brk=8888")
yarn_args+=("${@:-test}")
# Get the key from laconicd # Get the key from laconicd
laconicd_key=$( laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
yes | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe
)
# Set parameters for the test suite # Set parameters for the test suite
cosmos_chain_id=laconic_9000-1 cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317 laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api laconicd_gql_endpoint=http://laconicd:9473/api
docker compose exec laconicd sh -c "curl --retry 10 --retry-delay 3 --retry-connrefused http://127.0.0.1:9473/api"
# Run tests # Run tests
docker compose exec \ docker network inspect sdk_tests_default
-e COSMOS_CHAIN_ID="$cosmos_chain_id" \ sleep 30s
-e LACONICD_REST_ENDPOINT="$laconicd_rest_endpoint" \ docker logs laconicd
-e LACONICD_GQL_ENDPOINT="$laconicd_gql_endpoint" \ docker compose exec laconicd sh -c "curl http://127.0.0.1:9473/api"
-e PRIVATE_KEY="$laconicd_key" \ docker compose exec laconicd sh -c "curl http://localhost:9473/api"
sdk-test-runner yarn run "${yarn_args[@]}"
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test"

View File

@ -519,7 +519,6 @@ func New(l Logger, baseDir string, cfg Config) (*Network, error) {
l.Log("starting test network...") l.Log("starting test network...")
for _, v := range network.Validators { for _, v := range network.Validators {
l.Log("starting validator:", v.Moniker)
err := startInProcess(cfg, v) err := startInProcess(cfg, v)
if err != nil { if err != nil {
return nil, err return nil, err

View File

@ -1,20 +0,0 @@
package utils
import (
storetypes "github.com/cosmos/cosmos-sdk/store/types"
sdk "github.com/cosmos/cosmos-sdk/types"
)
func CtxWithCustomKVGasConfig(ctx *sdk.Context) *sdk.Context {
updatedCtx := ctx.WithKVGasConfig(storetypes.GasConfig{
HasCost: 0,
DeleteCost: 0,
ReadCostFlat: 0,
ReadCostPerByte: 0,
WriteCostFlat: 0,
WriteCostPerByte: 0,
IterNextCostFlat: 0,
})
return &updatedCtx
}

View File

@ -15,7 +15,6 @@ import (
auth "github.com/cosmos/cosmos-sdk/x/auth/keeper" auth "github.com/cosmos/cosmos-sdk/x/auth/keeper"
bank "github.com/cosmos/cosmos-sdk/x/bank/keeper" bank "github.com/cosmos/cosmos-sdk/x/bank/keeper"
params "github.com/cosmos/cosmos-sdk/x/params/types" params "github.com/cosmos/cosmos-sdk/x/params/types"
"github.com/tendermint/tendermint/libs/log"
wnsUtils "github.com/cerc-io/laconicd/utils" wnsUtils "github.com/cerc-io/laconicd/utils"
) )
@ -79,11 +78,6 @@ func NewKeeper(accountKeeper auth.AccountKeeper,
} }
} }
// Logger returns a module-specific logger.
func (k Keeper) Logger(ctx sdk.Context) log.Logger {
return ctx.Logger().With("module", types.ModuleName)
}
func (k *Keeper) SetUsageKeepers(usageKeepers []types.AuctionUsageKeeper) { func (k *Keeper) SetUsageKeepers(usageKeepers []types.AuctionUsageKeeper) {
k.usageKeepers = usageKeepers k.usageKeepers = usageKeepers
} }

View File

@ -2,11 +2,9 @@ package keeper
import ( import (
"context" "context"
"fmt"
sdk "github.com/cosmos/cosmos-sdk/types" sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cerc-io/laconicd/utils"
"github.com/cerc-io/laconicd/x/auction/types" "github.com/cerc-io/laconicd/x/auction/types"
) )
@ -22,7 +20,6 @@ var _ types.MsgServer = msgServer{}
func (s msgServer) CreateAuction(c context.Context, msg *types.MsgCreateAuction) (*types.MsgCreateAuctionResponse, error) { func (s msgServer) CreateAuction(c context.Context, msg *types.MsgCreateAuction) (*types.MsgCreateAuctionResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
signerAddress, err := sdk.AccAddressFromBech32(msg.Signer) signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
@ -49,15 +46,13 @@ func (s msgServer) CreateAuction(c context.Context, msg *types.MsgCreateAuction)
), ),
}) })
s.logTxGasConsumed(ctx, "CreateAuction")
return &types.MsgCreateAuctionResponse{Auction: resp}, nil return &types.MsgCreateAuctionResponse{Auction: resp}, nil
} }
// CommitBid is the command for committing a bid // CommitBid is the command for committing a bid
//nolint: all
func (s msgServer) CommitBid(c context.Context, msg *types.MsgCommitBid) (*types.MsgCommitBidResponse, error) { func (s msgServer) CommitBid(c context.Context, msg *types.MsgCommitBid) (*types.MsgCommitBidResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
signerAddress, err := sdk.AccAddressFromBech32(msg.Signer) signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
@ -82,15 +77,13 @@ func (s msgServer) CommitBid(c context.Context, msg *types.MsgCommitBid) (*types
), ),
}) })
s.logTxGasConsumed(ctx, "CommitBid")
return &types.MsgCommitBidResponse{Bid: resp}, nil return &types.MsgCommitBidResponse{Bid: resp}, nil
} }
// RevealBid is the command for revealing a bid //RevealBid is the command for revealing a bid
//nolint: all
func (s msgServer) RevealBid(c context.Context, msg *types.MsgRevealBid) (*types.MsgRevealBidResponse, error) { func (s msgServer) RevealBid(c context.Context, msg *types.MsgRevealBid) (*types.MsgRevealBidResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
signerAddress, err := sdk.AccAddressFromBech32(msg.Signer) signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
@ -115,12 +108,5 @@ func (s msgServer) RevealBid(c context.Context, msg *types.MsgRevealBid) (*types
), ),
}) })
s.logTxGasConsumed(ctx, "RevealBid")
return &types.MsgRevealBidResponse{Auction: resp}, nil return &types.MsgRevealBidResponse{Auction: resp}, nil
} }
func (s msgServer) logTxGasConsumed(ctx sdk.Context, tx string) {
gasConsumed := ctx.GasMeter().GasConsumed()
s.Keeper.Logger(ctx).Info("tx executed", "method", tx, "gas_consumed", fmt.Sprintf("%d", gasConsumed))
}

View File

@ -1,337 +0,0 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: vulcanize/auction/v1beta1/tx.proto
/*
Package types is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package types
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
var _ = metadata.Join
var (
filter_Msg_CreateAuction_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_CreateAuction_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCreateAuction
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CreateAuction_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.CreateAuction(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_CreateAuction_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCreateAuction
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CreateAuction_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.CreateAuction(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Msg_CommitBid_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_CommitBid_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCommitBid
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CommitBid_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.CommitBid(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_CommitBid_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCommitBid
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CommitBid_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.CommitBid(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Msg_RevealBid_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_RevealBid_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgRevealBid
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_RevealBid_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.RevealBid(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_RevealBid_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgRevealBid
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_RevealBid_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.RevealBid(ctx, &protoReq)
return msg, metadata, err
}
// RegisterMsgHandlerServer registers the http handlers for service Msg to "mux".
// UnaryRPC :call MsgServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterMsgHandlerFromEndpoint instead.
func RegisterMsgHandlerServer(ctx context.Context, mux *runtime.ServeMux, server MsgServer) error {
mux.Handle("POST", pattern_Msg_CreateAuction_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_CreateAuction_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CreateAuction_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_CommitBid_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_CommitBid_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CommitBid_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_RevealBid_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_RevealBid_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_RevealBid_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterMsgHandlerFromEndpoint is same as RegisterMsgHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterMsgHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterMsgHandler(ctx, mux, conn)
}
// RegisterMsgHandler registers the http handlers for service Msg to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterMsgHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterMsgHandlerClient(ctx, mux, NewMsgClient(conn))
}
// RegisterMsgHandlerClient registers the http handlers for service Msg
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "MsgClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "MsgClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "MsgClient" to call the correct interceptors.
func RegisterMsgHandlerClient(ctx context.Context, mux *runtime.ServeMux, client MsgClient) error {
mux.Handle("POST", pattern_Msg_CreateAuction_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_CreateAuction_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CreateAuction_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_CommitBid_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_CommitBid_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CommitBid_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_RevealBid_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_RevealBid_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_RevealBid_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_Msg_CreateAuction_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "auction", "v1beta1", "create_auction"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Msg_CommitBid_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "auction", "v1beta1", "commit_bid"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Msg_RevealBid_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "auction", "v1beta1", "reveal_bid"}, "", runtime.AssumeColonVerbOpt(false)))
)
var (
forward_Msg_CreateAuction_0 = runtime.ForwardResponseMessage
forward_Msg_CommitBid_0 = runtime.ForwardResponseMessage
forward_Msg_RevealBid_0 = runtime.ForwardResponseMessage
)
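For orientation, the generated RegisterMsgHandlerFromEndpoint above is what wires these REST routes (for example POST /vulcanize/auction/v1beta1/create_auction) to a node's gRPC endpoint. A minimal standalone sketch of mounting it on an HTTP server follows; the import path for the generated package, the addresses, and the plaintext dial option are assumptions for illustration, not part of this diff.

package main

import (
	"context"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"

	auctiontypes "github.com/cerc-io/laconicd/x/auction/types" // assumed location of the generated package
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// The mux translates incoming REST calls into gRPC calls against the node.
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()} // assumption: local node without TLS
	if err := auctiontypes.RegisterMsgHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		panic(err)
	}
	// assumption: 1317 is used here only as a typical REST listen port
	if err := http.ListenAndServe(":1317", mux); err != nil {
		panic(err)
	}
}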

@ -14,7 +14,6 @@ import (
  auth "github.com/cosmos/cosmos-sdk/x/auth/keeper"
  bank "github.com/cosmos/cosmos-sdk/x/bank/keeper"
  paramtypes "github.com/cosmos/cosmos-sdk/x/params/types"
- "github.com/tendermint/tendermint/libs/log"
  )
  // prefixIDToBondIndex is the prefix for ID -> Bond index in the KVStore.
@ -63,11 +62,6 @@ func NewKeeper(cdc codec.BinaryCodec,
  }
  }
- // Logger returns a module-specific logger.
- func (k Keeper) Logger(ctx sdk.Context) log.Logger {
- 	return ctx.Logger().With("module", types.ModuleName)
- }
  // Generates Bond ID -> Bond index key.
  func getBondIndexKey(id string) []byte {
  	return append(prefixIDToBondIndex, []byte(id)...)

@ -2,12 +2,9 @@ package keeper
  import (
  	"context"
- 	"fmt"
- 	sdk "github.com/cosmos/cosmos-sdk/types"
- 	"github.com/cerc-io/laconicd/utils"
  	"github.com/cerc-io/laconicd/x/bond/types"
+ 	sdk "github.com/cosmos/cosmos-sdk/types"
  )
  type msgServer struct {
@ -23,8 +20,6 @@ var _ types.MsgServer = msgServer{}
  func (k msgServer) CreateBond(c context.Context, msg *types.MsgCreateBond) (*types.MsgCreateBondResponse, error) {
  	ctx := sdk.UnwrapSDKContext(c)
- 	ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
  	signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
  	if err != nil {
  		return nil, err
@ -47,15 +42,12 @@ func (k msgServer) CreateBond(c context.Context, msg *types.MsgCreateBond) (*typ
  		),
  	})
- 	k.logTxGasConsumed(ctx, "CreateBond")
  	return &types.MsgCreateBondResponse{}, nil
  }
- //nolint: all
  func (k msgServer) RefillBond(c context.Context, msg *types.MsgRefillBond) (*types.MsgRefillBondResponse, error) {
  	ctx := sdk.UnwrapSDKContext(c)
- 	ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
  	signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
  	if err != nil {
  		return nil, err
@ -80,15 +72,12 @@ func (k msgServer) RefillBond(c context.Context, msg *types.MsgRefillBond) (*typ
  		),
  	})
- 	k.logTxGasConsumed(ctx, "RefillBond")
  	return &types.MsgRefillBondResponse{}, nil
  }
- //nolint: all
  func (k msgServer) WithdrawBond(c context.Context, msg *types.MsgWithdrawBond) (*types.MsgWithdrawBondResponse, error) {
  	ctx := sdk.UnwrapSDKContext(c)
- 	ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
  	signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
  	if err != nil {
  		return nil, err
@ -113,15 +102,11 @@ func (k msgServer) WithdrawBond(c context.Context, msg *types.MsgWithdrawBond) (
  		),
  	})
- 	k.logTxGasConsumed(ctx, "WithdrawBond")
  	return &types.MsgWithdrawBondResponse{}, nil
  }
  func (k msgServer) CancelBond(c context.Context, msg *types.MsgCancelBond) (*types.MsgCancelBondResponse, error) {
  	ctx := sdk.UnwrapSDKContext(c)
- 	ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
  	signerAddress, err := sdk.AccAddressFromBech32(msg.Signer)
  	if err != nil {
  		return nil, err
@ -144,12 +129,5 @@ func (k msgServer) CancelBond(c context.Context, msg *types.MsgCancelBond) (*typ
  		),
  	})
- 	k.logTxGasConsumed(ctx, "CancelBond")
  	return &types.MsgCancelBondResponse{}, nil
  }
- func (k msgServer) logTxGasConsumed(ctx sdk.Context, tx string) {
- 	gasConsumed := ctx.GasMeter().GasConsumed()
- 	k.Keeper.Logger(ctx).Info("tx executed", "method", tx, "gas_consumed", fmt.Sprintf("%d", gasConsumed))
- }

x/bond/types/tx.pb.gw.go (generated): 420 lines deleted
@ -1,420 +0,0 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: vulcanize/bond/v1beta1/tx.proto
/*
Package types is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package types
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
var _ = metadata.Join
var (
filter_Msg_CreateBond_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_CreateBond_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCreateBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CreateBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.CreateBond(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_CreateBond_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCreateBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CreateBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.CreateBond(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Msg_RefillBond_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_RefillBond_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgRefillBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_RefillBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.RefillBond(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_RefillBond_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgRefillBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_RefillBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.RefillBond(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Msg_WithdrawBond_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_WithdrawBond_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgWithdrawBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_WithdrawBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.WithdrawBond(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_WithdrawBond_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgWithdrawBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_WithdrawBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.WithdrawBond(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_Msg_CancelBond_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)
func request_Msg_CancelBond_0(ctx context.Context, marshaler runtime.Marshaler, client MsgClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCancelBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CancelBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.CancelBond(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Msg_CancelBond_0(ctx context.Context, marshaler runtime.Marshaler, server MsgServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq MsgCancelBond
var metadata runtime.ServerMetadata
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Msg_CancelBond_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.CancelBond(ctx, &protoReq)
return msg, metadata, err
}
// RegisterMsgHandlerServer registers the http handlers for service Msg to "mux".
// UnaryRPC :call MsgServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterMsgHandlerFromEndpoint instead.
func RegisterMsgHandlerServer(ctx context.Context, mux *runtime.ServeMux, server MsgServer) error {
mux.Handle("POST", pattern_Msg_CreateBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_CreateBond_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CreateBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_RefillBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_RefillBond_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_RefillBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_WithdrawBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_WithdrawBond_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_WithdrawBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_CancelBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Msg_CancelBond_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CancelBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterMsgHandlerFromEndpoint is same as RegisterMsgHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterMsgHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterMsgHandler(ctx, mux, conn)
}
// RegisterMsgHandler registers the http handlers for service Msg to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterMsgHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterMsgHandlerClient(ctx, mux, NewMsgClient(conn))
}
// RegisterMsgHandlerClient registers the http handlers for service Msg
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "MsgClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "MsgClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "MsgClient" to call the correct interceptors.
func RegisterMsgHandlerClient(ctx context.Context, mux *runtime.ServeMux, client MsgClient) error {
mux.Handle("POST", pattern_Msg_CreateBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_CreateBond_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CreateBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_RefillBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_RefillBond_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_RefillBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_WithdrawBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_WithdrawBond_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_WithdrawBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_Msg_CancelBond_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Msg_CancelBond_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Msg_CancelBond_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_Msg_CreateBond_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "bond", "v1beta1", "create_bond"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Msg_RefillBond_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "bond", "v1beta1", "refill_bond"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Msg_WithdrawBond_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "bond", "v1beta1", "withdraw_bond"}, "", runtime.AssumeColonVerbOpt(false)))
pattern_Msg_CancelBond_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"vulcanize", "bond", "v1beta1", "cancel_bond"}, "", runtime.AssumeColonVerbOpt(false)))
)
var (
forward_Msg_CreateBond_0 = runtime.ForwardResponseMessage
forward_Msg_RefillBond_0 = runtime.ForwardResponseMessage
forward_Msg_WithdrawBond_0 = runtime.ForwardResponseMessage
forward_Msg_CancelBond_0 = runtime.ForwardResponseMessage
)
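Since the bond module's REST gateway above is deleted in this change, a caller that previously hit these POST routes would reach the Msg service through the generated gRPC client instead. The sketch below only exercises the client API visible in the deleted file; the endpoint address, dial option, and the (omitted) message fields are assumptions, and in practice such messages are signed and broadcast as part of a transaction.

package main

import (
	"context"

	"google.golang.org/grpc"

	bondtypes "github.com/cerc-io/laconicd/x/bond/types" // package of the deleted gateway file
)

func main() {
	// assumption: a locally reachable node gRPC endpoint without TLS
	conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	client := bondtypes.NewMsgClient(conn)
	// MsgCreateBond fields are deliberately left unset here; this only shows
	// the call shape that the deleted gateway handlers were forwarding to.
	if _, err := client.CreateBond(context.Background(), &bondtypes.MsgCreateBond{}); err != nil {
		panic(err)
	}
}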

@ -32,10 +32,11 @@ import (
"github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/crypto"
) )
// #nosec 101
const ( const (
OpWeightMsgEthSimpleTransfer = "op_weight_msg_eth_simple_transfer" // #nosec G101 OpWeightMsgEthSimpleTransfer = "op_weight_msg_eth_simple_transfer"
OpWeightMsgEthCreateContract = "op_weight_msg_eth_create_contract" // #nosec G101 OpWeightMsgEthCreateContract = "op_weight_msg_eth_create_contract"
OpWeightMsgEthCallContract = "op_weight_msg_eth_call_contract" // #nosec G101 OpWeightMsgEthCallContract = "op_weight_msg_eth_call_contract"
) )
const ( const (

@ -161,9 +161,9 @@ $ %s query %s list
  	}
  	recordsList := res.GetRecords()
- 	records := make([]types.ReadableRecord, len(recordsList))
+ 	records := make([]types.RecordType, len(recordsList))
  	for i, record := range res.GetRecords() {
- 		records[i] = record.ToReadableRecord()
+ 		records[i] = record.ToRecordType()
  	}
  	bytesResult, err := json.Marshal(records)
  	if err != nil {

@ -66,10 +66,17 @@ $ %s tx %s set [payload file path] [bond-id]
  			return err
  		}
- 		payload := payloadType.ToPayload()
+ 		payload, err := payloadType.ToPayload()
+ 		if err != nil {
+ 			return err
+ 		}
  		msg := types.NewMsgSetRecord(payload, args[1], clientCtx.GetFromAddress())
- 		return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg)
+ 		err = msg.ValidateBasic()
+ 		if err != nil {
+ 			return err
+ 		}
+ 		return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), &msg)
  		},
  	}
@ -262,7 +269,7 @@ $ %s tx %s set-name [crn] [cid]
  		if err != nil {
  			return err
  		}
- 		return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), msg)
+ 		return tx.GenerateOrBroadcastTxCLI(clientCtx, cmd.Flags(), &msg)
  		},
  	}
@ -370,8 +377,8 @@ $ %s tx %s delete-name [crn]
  }
  // GetPayloadFromFile Load payload object from YAML file.
- func GetPayloadFromFile(filePath string) (*types.ReadablePayload, error) {
- 	var payload types.ReadablePayload
+ func GetPayloadFromFile(filePath string) (*types.PayloadType, error) {
+ 	var payload types.PayloadType
  	data, err := os.ReadFile(filePath) // #nosec G304
  	if err != nil {

@ -15,8 +15,6 @@ import (
tmcli "github.com/tendermint/tendermint/libs/cli" tmcli "github.com/tendermint/tendermint/libs/cli"
) )
const badPath = "/asdasd"
func (s *IntegrationTestSuite) TestGRPCQueryParams() { func (s *IntegrationTestSuite) TestGRPCQueryParams() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
@ -30,7 +28,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryParams() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
}, },
@ -44,8 +42,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryParams() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
resp, err := rest.GetRequest(tc.url) resp, _ := rest.GetRequest(tc.url)
s.NoError(err)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -63,7 +60,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryParams() {
} }
} }
//nolint: all //nolint:all
func (s *IntegrationTestSuite) TestGRPCQueryWhoIs() { func (s *IntegrationTestSuite) TestGRPCQueryWhoIs() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
@ -78,7 +75,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryWhoIs() {
}{ }{
{ {
"invalid url", "invalid url",
reqUrl + badPath, reqUrl + "/asdasd",
true, true,
"", "",
func(authorityName string) { func(authorityName string) {
@ -113,11 +110,11 @@ func (s *IntegrationTestSuite) TestGRPCQueryWhoIs() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(authorityName) if !tc.expectErr {
tc.url = fmt.Sprintf(tc.url, authorityName) tc.preRun(authorityName)
tc.url = fmt.Sprintf(tc.url, authorityName)
resp, err := rest.GetRequest(tc.url) }
s.NoError(err) resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -134,7 +131,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryWhoIs() {
func (s *IntegrationTestSuite) TestGRPCQueryLookup() { func (s *IntegrationTestSuite) TestGRPCQueryLookup() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
reqURL := val.APIAddress + "/vulcanize/registry/v1beta1/lookup" reqURL := val.APIAddress + "/vulcanize/registry/v1beta1/lookup?crn=%s"
authorityName := "QueryLookUp" authorityName := "QueryLookUp"
testCases := []struct { testCases := []struct {
@ -146,7 +143,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryLookup() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
func(authorityName string) { func(authorityName string) {
@ -154,7 +151,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryLookup() {
}, },
{ {
"Success", "Success",
fmt.Sprintf(reqURL+"?crn=crn://%s/", authorityName), reqURL,
false, false,
"", "",
func(authorityName string) { func(authorityName string) {
@ -166,9 +163,11 @@ func (s *IntegrationTestSuite) TestGRPCQueryLookup() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(authorityName) if !tc.expectErr {
resp, err := rest.GetRequest(tc.url) tc.preRun(authorityName)
s.NoError(err) tc.url = fmt.Sprintf(reqURL, fmt.Sprintf("crn://%s/", authorityName))
}
resp, _ := rest.GetRequest(tc.url)
if tc.expectErr { if tc.expectErr {
sr.Contains(string(resp), tc.errorMsg) sr.Contains(string(resp), tc.errorMsg)
} else { } else {
@ -181,7 +180,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryLookup() {
} }
} }
//nolint: all //nolint:all
func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() { func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
@ -196,7 +195,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() {
}{ }{
{ {
"invalid url", "invalid url",
reqUrl + badPath, reqUrl + "/asdasd",
true, true,
"", "",
func(bondId string) { func(bondId string) {
@ -210,7 +209,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() {
func(bondId string) { func(bondId string) {
dir, err := os.Getwd() dir, err := os.Getwd()
sr.NoError(err) sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml" payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
args := []string{ args := []string{
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
@ -234,11 +233,12 @@ func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(s.bondID) if !tc.expectErr {
tc.preRun(s.bondID)
}
// wait 12 seconds for records expires // wait 12 seconds for records expires
time.Sleep(time.Second * 12) time.Sleep(time.Second * 12)
resp, err := rest.GetRequest(tc.url) resp, _ := rest.GetRequest(tc.url)
s.NoError(err)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -252,7 +252,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryRecordExpiryQueue() {
} }
} }
//nolint: all //nolint:all
func (s *IntegrationTestSuite) TestGRPCQueryAuthorityExpiryQueue() { func (s *IntegrationTestSuite) TestGRPCQueryAuthorityExpiryQueue() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
@ -267,7 +267,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryAuthorityExpiryQueue() {
}{ }{
{ {
"invalid url", "invalid url",
reqUrl + badPath, reqUrl + "/asdasd",
true, true,
"", "",
func(authorityName string) { func(authorityName string) {
@ -303,12 +303,13 @@ func (s *IntegrationTestSuite) TestGRPCQueryAuthorityExpiryQueue() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun("QueryAuthorityExpiryQueue") if !tc.expectErr {
tc.preRun("QueryAuthorityExpiryQueue")
}
// wait 12 seconds to name authorites expires // wait 12 seconds to name authorites expires
time.Sleep(time.Second * 12) time.Sleep(time.Second * 12)
resp, err := rest.GetRequest(tc.url) resp, _ := rest.GetRequest(tc.url)
s.NoError(err)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -323,7 +324,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryAuthorityExpiryQueue() {
} }
} }
//nolint: all //nolint:all
func (s *IntegrationTestSuite) TestGRPCQueryListRecords() { func (s *IntegrationTestSuite) TestGRPCQueryListRecords() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
@ -338,7 +339,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryListRecords() {
}{ }{
{ {
"invalid url", "invalid url",
reqUrl + badPath, reqUrl + "/asdasd",
true, true,
"", "",
func(bondId string) { func(bondId string) {
@ -352,7 +353,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryListRecords() {
func(bondId string) { func(bondId string) {
dir, err := os.Getwd() dir, err := os.Getwd()
sr.NoError(err) sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml" payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
args := []string{ args := []string{
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
@ -376,9 +377,10 @@ func (s *IntegrationTestSuite) TestGRPCQueryListRecords() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(s.bondID) if !tc.expectErr {
resp, err := rest.GetRequest(tc.url) tc.preRun(s.bondID)
s.NoError(err) }
resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -407,7 +409,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRecordByID() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
func(bondId string) string { func(bondId string) string {
@ -431,7 +433,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRecordByID() {
} }
out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, args) out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, args)
sr.NoError(err) sr.NoError(err)
var records []nstypes.ReadableRecord var records []nstypes.RecordType
err = json.Unmarshal(out.Bytes(), &records) err = json.Unmarshal(out.Bytes(), &records)
sr.NoError(err) sr.NoError(err)
return records[0].ID return records[0].ID
@ -441,11 +443,12 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRecordByID() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
recordID := tc.preRun(s.bondID) var recordID string
tc.url = fmt.Sprintf(reqURL, recordID) if !tc.expectErr {
recordID = tc.preRun(s.bondID)
resp, err := rest.GetRequest(tc.url) tc.url = fmt.Sprintf(reqURL, recordID)
s.NoError(err) }
resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -475,7 +478,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRecordByBondID() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
func(bondId string) { func(bondId string) {
@ -495,11 +498,11 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRecordByBondID() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(s.bondID) if !tc.expectErr {
tc.url = fmt.Sprintf(reqURL, s.bondID) tc.preRun(s.bondID)
tc.url = fmt.Sprintf(reqURL, s.bondID)
resp, err := rest.GetRequest(tc.url) }
s.NoError(err) resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -529,7 +532,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRegistryModuleBalance() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
func(bondId string) { func(bondId string) {
@ -549,9 +552,10 @@ func (s *IntegrationTestSuite) TestGRPCQueryGetRegistryModuleBalance() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun(s.bondID) if !tc.expectErr {
resp, err := rest.GetRequest(tc.url) tc.preRun(s.bondID)
s.NoError(err) }
resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -579,7 +583,7 @@ func (s *IntegrationTestSuite) TestGRPCQueryNamesList() {
}{ }{
{ {
"invalid url", "invalid url",
reqURL + badPath, reqURL + "/asdasd",
true, true,
"", "",
func(authorityName string) { func(authorityName string) {
@ -599,9 +603,10 @@ func (s *IntegrationTestSuite) TestGRPCQueryNamesList() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(tc.name, func() { s.Run(tc.name, func() {
tc.preRun("ListNameRecords") if !tc.expectErr {
resp, err := rest.GetRequest(tc.url) tc.preRun("ListNameRecords")
s.NoError(err) }
resp, _ := rest.GetRequest(tc.url)
require := s.Require() require := s.Require()
if tc.expectErr { if tc.expectErr {
require.Contains(string(resp), tc.errorMsg) require.Contains(string(resp), tc.errorMsg)
@ -621,7 +626,7 @@ func createRecord(bondID string, s *IntegrationTestSuite) {
dir, err := os.Getwd() dir, err := os.Getwd()
sr.NoError(err) sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml" payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
args := []string{ args := []string{
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
@ -638,5 +643,5 @@ func createRecord(bondID string, s *IntegrationTestSuite) {
var d sdk.TxResponse var d sdk.TxResponse
err = val.ClientCtx.Codec.UnmarshalJSON(out.Bytes(), &d) err = val.ClientCtx.Codec.UnmarshalJSON(out.Bytes(), &d)
sr.NoError(err) sr.NoError(err)
sr.Zero(d.Code, d.RawLog) sr.Zero(d.Code)
} }

@ -79,7 +79,7 @@ func (s *IntegrationTestSuite) TestGetCmdQueryForRecords() {
  	bondID := GetBondID(s)
  	dir, err := os.Getwd()
  	sr.NoError(err)
- 	payloadPath := dir + "/service_provider_example.yml"
+ 	payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
  	args := []string{
  		payloadPath, bondID,
  		fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
@ -114,7 +114,7 @@ func (s *IntegrationTestSuite) TestGetCmdQueryForRecords() {
  			sr.Error(err)
  		} else {
  			sr.NoError(err)
- 			var records []types.ReadableRecord
+ 			var records []types.RecordType
  			err := json.Unmarshal(out.Bytes(), &records)
  			sr.NoError(err)
  			sr.Equal(tc.noOfRecords, len(records))
@ -538,6 +538,9 @@ func createNameRecord(authorityName string, s *IntegrationTestSuite) {
  	sr.NoError(err)
  	sr.Zero(d.Code)
+ 	// creating the bond
+ 	CreateBond(s)
  	// Get the bond-id
  	bondID := GetBondID(s)

@ -1,13 +0,0 @@
record:
type: ServiceProviderRegistration
bond_id: madeUpBondID
laconic_id: madeUpLaconicID
version: 1.0.0
x500:
common_name: cerc-io
organization_unit: xyz
organization_name: abc
state_name: california
country: US
locality_name: local

@ -101,23 +101,42 @@ func CreateBond(s *IntegrationTestSuite) {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
clientCtx := val.ClientCtx testCases := []struct {
cmd := bondcli.NewCreateBondCmd() name string
args := []string{ args []string
fmt.Sprintf("100000000000%s", s.cfg.BondDenom), err bool
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), }{
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), {
fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), "create bond",
fmt.Sprintf("--%s=json", tmcli.OutputFlag), []string{
fmt.Sprintf("--%s=%s", flags.FlagFees, fmt.Sprintf("3%s", s.cfg.BondDenom)), fmt.Sprintf("100000000000%s", s.cfg.BondDenom),
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock),
fmt.Sprintf("--%s=json", tmcli.OutputFlag),
fmt.Sprintf("--%s=%s", flags.FlagFees, fmt.Sprintf("3%s", s.cfg.BondDenom)),
},
false,
},
} }
out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, args)
sr.NoError(err)
var d sdk.TxResponse for _, tc := range testCases {
err = val.ClientCtx.Codec.UnmarshalJSON(out.Bytes(), &d) s.Run(fmt.Sprintf("Case %s", tc.name), func() {
sr.NoError(err) clientCtx := val.ClientCtx
sr.Zero(d.Code) cmd := bondcli.NewCreateBondCmd()
out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, tc.args)
if tc.err {
sr.Error(err)
} else {
sr.NoError(err)
var d sdk.TxResponse
err = val.ClientCtx.Codec.UnmarshalJSON(out.Bytes(), &d)
sr.NoError(err)
sr.Zero(d.Code)
}
})
}
} }
func GetBondID(s *IntegrationTestSuite) string { func GetBondID(s *IntegrationTestSuite) string {
@ -141,11 +160,6 @@ func (s *IntegrationTestSuite) TestGetCmdSetRecord() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
bondID := GetBondID(s)
dir, err := os.Getwd()
sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml"
testCases := []struct { testCases := []struct {
name string name string
args []string args []string
@ -165,7 +179,6 @@ func (s *IntegrationTestSuite) TestGetCmdSetRecord() {
{ {
"success", "success",
[]string{ []string{
payloadPath, bondID,
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
fmt.Sprintf("--%s=json", tmcli.OutputFlag), fmt.Sprintf("--%s=json", tmcli.OutputFlag),
@ -178,6 +191,17 @@ func (s *IntegrationTestSuite) TestGetCmdSetRecord() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(fmt.Sprintf("Case %s", tc.name), func() { s.Run(fmt.Sprintf("Case %s", tc.name), func() {
if !tc.err {
// create the bond
CreateBond(s)
// get the bond id from bond list
bondID := GetBondID(s)
dir, err := os.Getwd()
sr.NoError(err)
payloadPath := dir + "/../../helpers/examples/watcher_registration_example.yml"
tc.args = append([]string{payloadPath, bondID}, tc.args...)
}
clientCtx := val.ClientCtx clientCtx := val.ClientCtx
cmd := cli.GetCmdSetRecord() cmd := cli.GetCmdSetRecord()
@ -317,6 +341,9 @@ func (s *IntegrationTestSuite) TestGetCmdSetName() {
sr.NoError(err) sr.NoError(err)
sr.Zero(d.Code) sr.Zero(d.Code)
// creating the bond
CreateBond(s)
// Get the bond-id // Get the bond-id
bondID := GetBondID(s) bondID := GetBondID(s)
@ -342,7 +369,9 @@ func (s *IntegrationTestSuite) TestGetCmdSetName() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(fmt.Sprintf("Case %s", tc.name), func() { s.Run(fmt.Sprintf("Case %s", tc.name), func() {
tc.preRun(authorityName) if !tc.err {
tc.preRun(authorityName)
}
clientCtx := val.ClientCtx clientCtx := val.ClientCtx
cmd := cli.GetCmdSetName() cmd := cli.GetCmdSetName()
@ -365,7 +394,6 @@ func (s *IntegrationTestSuite) TestGetCmdSetAuthorityBond() {
val := s.network.Validators[0] val := s.network.Validators[0]
sr := s.Require() sr := s.Require()
authorityName := "TestGetCmdSetAuthorityBond" authorityName := "TestGetCmdSetAuthorityBond"
bondID := GetBondID(s)
testCases := []struct { testCases := []struct {
name string name string
@ -389,7 +417,6 @@ func (s *IntegrationTestSuite) TestGetCmdSetAuthorityBond() {
{ {
"success with name and bond-id", "success with name and bond-id",
[]string{ []string{
authorityName, bondID,
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
fmt.Sprintf("--%s=json", tmcli.OutputFlag), fmt.Sprintf("--%s=json", tmcli.OutputFlag),
@ -422,7 +449,15 @@ func (s *IntegrationTestSuite) TestGetCmdSetAuthorityBond() {
for _, tc := range testCases { for _, tc := range testCases {
s.Run(fmt.Sprintf("Case %s", tc.name), func() { s.Run(fmt.Sprintf("Case %s", tc.name), func() {
tc.preRun(authorityName) if !tc.err {
// reserve the name
tc.preRun(authorityName)
// creating the bond
CreateBond(s)
// getting the bond-id
bondID := GetBondID(s)
tc.args = append([]string{authorityName, bondID}, tc.args...)
}
clientCtx := val.ClientCtx clientCtx := val.ClientCtx
cmd := cli.GetCmdSetAuthorityBond() cmd := cli.GetCmdSetAuthorityBond()
@ -466,7 +501,6 @@ func (s *IntegrationTestSuite) TestGetCmdDeleteName() {
{ {
"successfully delete name", "successfully delete name",
[]string{ []string{
fmt.Sprintf("crn://%s/", authorityName),
fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName), fmt.Sprintf("--%s=%s", flags.FlagFrom, accountName),
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation),
fmt.Sprintf("--%s=json", tmcli.OutputFlag), fmt.Sprintf("--%s=json", tmcli.OutputFlag),
@ -482,8 +516,10 @@ func (s *IntegrationTestSuite) TestGetCmdDeleteName() {
for _, tc := range testCasesForDeletingName { for _, tc := range testCasesForDeletingName {
s.Run(fmt.Sprintf("Case %s", tc.name), func() { s.Run(fmt.Sprintf("Case %s", tc.name), func() {
tc.preRun(authorityName, s) if !tc.err {
tc.preRun(authorityName, s)
tc.args = append([]string{fmt.Sprintf("crn://%s/", authorityName)}, tc.args...)
}
clientCtx := val.ClientCtx clientCtx := val.ClientCtx
cmd := cli.GetCmdDeleteName() cmd := cli.GetCmdDeleteName()
@ -538,11 +574,13 @@ func (s *IntegrationTestSuite) TestGetCmdDissociateBond() {
}, },
false, false,
func(s *IntegrationTestSuite) string { func(s *IntegrationTestSuite) string {
// create the bond
CreateBond(s)
// get the bond id from bond list // get the bond id from bond list
bondID := GetBondID(s) bondID := GetBondID(s)
dir, err := os.Getwd() dir, err := os.Getwd()
sr.NoError(err) sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml" payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
args := []string{ args := []string{
payloadPath, bondID, payloadPath, bondID,
@ -568,7 +606,7 @@ func (s *IntegrationTestSuite) TestGetCmdDissociateBond() {
cmd = cli.GetCmdList() cmd = cli.GetCmdList()
out, err = clitestutil.ExecTestCLICmd(clientCtx, cmd, args) out, err = clitestutil.ExecTestCLICmd(clientCtx, cmd, args)
sr.NoError(err) sr.NoError(err)
var records []nstypes.ReadableRecord var records []nstypes.RecordType
err = json.Unmarshal(out.Bytes(), &records) err = json.Unmarshal(out.Bytes(), &records)
sr.NoError(err) sr.NoError(err)
return records[0].ID return records[0].ID
@ -778,11 +816,13 @@ func (s *IntegrationTestSuite) TestGetCmdAssociateBond() {
}, },
false, false,
func(s *IntegrationTestSuite) (string, string) { func(s *IntegrationTestSuite) (string, string) {
// create the bond
CreateBond(s)
// get the bond id from bond list // get the bond id from bond list
bondID := GetBondID(s) bondID := GetBondID(s)
dir, err := os.Getwd() dir, err := os.Getwd()
sr.NoError(err) sr.NoError(err)
payloadPath := dir + "/service_provider_example.yml" payloadPath := dir + "/../../helpers/examples/service_provider_example.yml"
txArgs := []string{ txArgs := []string{
payloadPath, bondID, payloadPath, bondID,
@ -808,7 +848,7 @@ func (s *IntegrationTestSuite) TestGetCmdAssociateBond() {
cmd = cli.GetCmdList() cmd = cli.GetCmdList()
out, err = clitestutil.ExecTestCLICmd(clientCtx, cmd, args) out, err = clitestutil.ExecTestCLICmd(clientCtx, cmd, args)
sr.NoError(err) sr.NoError(err)
var records []nstypes.ReadableRecord var records []nstypes.RecordType
err = json.Unmarshal(out.Bytes(), &records) err = json.Unmarshal(out.Bytes(), &records)
sr.NoError(err) sr.NoError(err)

@ -1,7 +0,0 @@
record:
type: WebsiteRegistrationRecord
url: https://cerc.io
repo_registration_record_cid: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
build_artifact_cid: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_cid: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.0

@ -0,0 +1,9 @@
record:
hash_reference:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
targeted_arch: x86_64
runtime_version: go 1.18
repo_reference:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
version: 1.0.0
type: Binary
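Example payloads like the one above are plain YAML: a top-level record map whose keys become the record's attributes, with the /: entries acting as hash references. A minimal sketch of loading such a file as a test fixture follows; the yaml.v2 dependency, the struct layout, and running next to the example file are assumptions for illustration, not something this diff prescribes.

package main

import (
	"fmt"
	"os"

	yaml "gopkg.in/yaml.v2" // assumption: any YAML library with map unmarshalling would do
)

// payloadFile mirrors the example layout: a single "record" map of attributes.
type payloadFile struct {
	Record map[string]interface{} `yaml:"record"`
}

func main() {
	data, err := os.ReadFile("binary_example.yml") // assumption: file sits in the working directory
	if err != nil {
		panic(err)
	}
	var p payloadFile
	if err := yaml.Unmarshal(data, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Record["type"], p.Record["version"]) // Binary 1.0.0
}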

@ -0,0 +1,13 @@
record:
name: Laconic
ipld_types:
- type3
- type11
- schema2
- codec5
type: ChainRegistrationRecord
version: 0.11.2
chain_id: laconic_9000-1
network_id: "1392"
genesis_hash:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D

@ -0,0 +1,8 @@
record:
image_id: 77af4d6b9913
binary_reference:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
repo_reference:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
version: 1.0.0
type: DockerImage

@ -1,7 +0,0 @@
record:
type: GeneralRecord
name: foo
version: 1.0.0
tags:
- tagA
- tagB

@ -0,0 +1,6 @@
record:
name: cosmos-sdk
repo_reference: https://github.com/cosmos/cosmos-sdk
description: This is a description string
version: 0.46.7
type: GitRepository

@ -1,7 +1,8 @@
  record:
-   attr1: value1
-   attr2: value2
-   link1:
-     /: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
-   link2:
+   repo_reference:
      /: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
+   js_package_ref:
+     /: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
+   version: 1.0.0
+   type: JSPackage
+   name: test-JSPackage

@ -0,0 +1,9 @@
record:
service_provider_ref:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
auction_ref:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
watcher_ref:
/: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.0
type: ResponderContract

@ -1,6 +1,6 @@
  record:
-   type: ServiceProviderRegistration
+   type: ServiceProviderRecord
    bond_id: madeUpBondID
    laconic_id: madeUpLaconicID
    version: 1.0.0

@ -0,0 +1,15 @@
record:
metadata:
version: 0.32.0
chain_reference:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
repo_reference:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
wasm:
hash_reference:
/: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
metadata:
compiler_version: 1.15.0
execution_engine_version: 0.16.1
version: 1.0.0
type: WatcherRegistrationRecord

View File

@ -1,7 +1,10 @@
record: record:
type: WebsiteRegistrationRecord type: WebsiteRegistrationRecord
url: https://cerc.io url: https://cerc.io
repo_registration_record_cid: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D repo_reference:
build_artifact_cid: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9 /: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
tls_cerc_cid: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR build_artifact_ref:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_ref:
/: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.0 version: 1.0.0

View File

@ -47,20 +47,24 @@ func Int64ToBytes(num int64) []byte {
return buf.Bytes() return buf.Bytes()
} }
func MustMarshalJSON[T any](val T) (bytes []byte) { // MarshalMapToJSONBytes converts map[string]interface{} to bytes.
func MarshalMapToJSONBytes(val map[string]interface{}) (bytes []byte) {
bytes, err := json.Marshal(val) bytes, err := json.Marshal(val)
if err != nil { if err != nil {
panic("JSON marshal error:" + err.Error()) panic("Marshal error.")
} }
return return
} }
func MustUnmarshalJSON[T any](bytes []byte) T { // UnMarshalMapFromJSONBytes converts bytes to map[string]interface{}.
var val T func UnMarshalMapFromJSONBytes(bytes []byte) map[string]interface{} {
var val map[string]interface{}
err := json.Unmarshal(bytes, &val) err := json.Unmarshal(bytes, &val)
if err != nil { if err != nil {
panic("JSON unmarshal error:" + err.Error()) panic("Unmarshal error.")
} }
return val return val
} }
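The two helpers above replace the generic MustMarshalJSON/MustUnmarshalJSON pair with map-specific versions. A minimal usage sketch, not part of the change set; it assumes the x/registry/helpers package shown above plus fmt:

attrs := map[string]interface{}{
	"type":    "WebsiteRegistrationRecord",
	"version": "1.0.0",
}
// MarshalMapToJSONBytes panics on a marshal failure, mirroring the old Must* behaviour.
bz := helpers.MarshalMapToJSONBytes(attrs)
// UnMarshalMapFromJSONBytes decodes the bytes back into a map[string]interface{}.
decoded := helpers.UnMarshalMapFromJSONBytes(bz)
fmt.Println(decoded["type"]) // WebsiteRegistrationRecord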

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"fmt" "fmt"
"os" "os"
"reflect" "strings"
"github.com/cerc-io/laconicd/x/registry/client/cli" "github.com/cerc-io/laconicd/x/registry/client/cli"
"github.com/cerc-io/laconicd/x/registry/helpers" "github.com/cerc-io/laconicd/x/registry/helpers"
@ -38,9 +38,15 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
sr := suite.Require() sr := suite.Require()
var recordId string var recordId string
examples := []string{ examples := []string{
"/../helpers/examples/binary_example.yml",
"/../helpers/examples/chain_registration_example.yml",
"/../helpers/examples/docker_image_example.yml",
"/../helpers/examples/git_repo_example.yml",
"/../helpers/examples/js_package_example.yml",
"/../helpers/examples/responder_contract_example.yml",
"/../helpers/examples/service_provider_example.yml", "/../helpers/examples/service_provider_example.yml",
"/../helpers/examples/watcher_registration_example.yml",
"/../helpers/examples/website_registration_example.yml", "/../helpers/examples/website_registration_example.yml",
"/../helpers/examples/general_record_example.yml",
} }
testCases := []struct { testCases := []struct {
msg string msg string
@ -61,16 +67,17 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
&registrytypes.QueryListRecordsRequest{}, &registrytypes.QueryListRecordsRequest{},
true, true,
false, false,
3, len(examples),
}, },
{ {
"Filter with type", "Filter with type",
&registrytypes.QueryListRecordsRequest{ &registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{ Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{ {
Key: "type", Key: "type---",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{ Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"WebsiteRegistrationRecord"}, Type: "string",
String_: "WebsiteRegistrationRecord",
}, },
}, },
}, },
@ -80,77 +87,15 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
false, false,
1, 1,
}, },
// Skip the following test as querying with recursive values not supported (PR https://git.vdb.to/cerc-io/laconicd/pulls/112)
// See function RecordsFromAttributes (QueryValueToJSON call) in the registry keeper implementation (x/registry/keeper/keeper.go)
// {
// "Filter with tag (extant) (https://git.vdb.to/cerc-io/laconicd/issues/129)",
// &registrytypes.QueryListRecordsRequest{
// Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
// {
// Key: "tags",
// // Value: &registrytypes.QueryListRecordsRequest_ValueInput{
// // Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"tagA"},
// // },
// Value: &registrytypes.QueryListRecordsRequest_ValueInput{
// Value: &registrytypes.QueryListRecordsRequest_ValueInput_Array{Array: &registrytypes.QueryListRecordsRequest_ArrayInput{
// Values: []*registrytypes.QueryListRecordsRequest_ValueInput{
// {
// Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"tagA"},
// },
// },
// }},
// },
// // Throws: "Recursive query values are not supported"
// },
// },
// All: true,
// },
// true,
// false,
// 1,
// },
{ {
"Filter with tag (non-existent) (https://git.vdb.to/cerc-io/laconicd/issues/129)", "Filter with attributes ServiceProviderRecord",
&registrytypes.QueryListRecordsRequest{ &registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{ Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{ {
Key: "tags", Key: "x500---state_name---",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{ Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"NOEXIST"}, Type: "string",
}, String_: "california",
},
},
All: true,
},
true,
false,
0,
},
{
"Filter test for key collision (https://git.vdb.to/cerc-io/laconicd/issues/122)",
&registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{
Key: "typ",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"eWebsiteRegistrationRecord"},
},
},
},
All: true,
},
true,
false,
0,
},
{
"Filter with attributes ServiceProviderRegistration",
&registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{
Key: "x500state_name",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Value: &registrytypes.QueryListRecordsRequest_ValueInput_String_{"california"},
}, },
}, },
}, },
@ -160,6 +105,42 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
false, false,
1, 1,
}, },
{
"Filter with attributes WatcherRegistrationRecord",
&registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{
Key: "metadata---chain_reference---/---",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Type: "string",
String_: "QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9",
},
},
},
All: true,
},
true,
false,
1,
},
{
"Filter with attributes version",
&registrytypes.QueryListRecordsRequest{
Attributes: []*registrytypes.QueryListRecordsRequest_KeyValueInput{
{
Key: "version---",
Value: &registrytypes.QueryListRecordsRequest_ValueInput{
Type: "string",
String_: "1.0.0",
},
},
},
All: true,
},
true,
false,
7,
},
} }
for _, test := range testCases { for _, test := range testCases {
suite.Run(fmt.Sprintf("Case %s ", test.msg), func() { suite.Run(fmt.Sprintf("Case %s ", test.msg), func() {
@ -169,7 +150,8 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
sr.NoError(err) sr.NoError(err)
payloadType, err := cli.GetPayloadFromFile(fmt.Sprint(dir, example)) payloadType, err := cli.GetPayloadFromFile(fmt.Sprint(dir, example))
sr.NoError(err) sr.NoError(err)
payload := payloadType.ToPayload() payload, err := payloadType.ToPayload()
sr.NoError(err)
record, err := suite.app.RegistryKeeper.ProcessSetRecord(ctx, registrytypes.MsgSetRecord{ record, err := suite.app.RegistryKeeper.ProcessSetRecord(ctx, registrytypes.MsgSetRecord{
BondId: suite.bond.GetId(), BondId: suite.bond.GetId(),
Signer: suite.accounts[0].String(), Signer: suite.accounts[0].String(),
@ -185,38 +167,17 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
} else { } else {
sr.NoError(err) sr.NoError(err)
sr.Equal(test.noOfRecords, len(resp.GetRecords())) sr.Equal(test.noOfRecords, len(resp.GetRecords()))
if test.createRecords && test.noOfRecords > 0 { if test.createRecords {
recordId = resp.GetRecords()[0].GetId() recordId = resp.GetRecords()[0].GetId()
sr.NotZero(resp.GetRecords()) sr.NotZero(resp.GetRecords())
sr.Equal(resp.GetRecords()[0].GetBondId(), suite.bond.GetId()) sr.Equal(resp.GetRecords()[0].GetBondId(), suite.bond.GetId())
for _, record := range resp.GetRecords() { for _, record := range resp.GetRecords() {
recAttr := helpers.MustUnmarshalJSON[registrytypes.AttributeMap](record.Attributes) bz, err := registrytypes.GetJSONBytesFromAny(*record.Attributes)
sr.NoError(err)
recAttr := helpers.UnMarshalMapFromJSONBytes(bz)
for _, attr := range test.req.GetAttributes() { for _, attr := range test.req.GetAttributes() {
enc, err := keeper.QueryValueToJSON(attr.Value) sr.Equal(keeper.GetAttributeValue(attr.Value), getAttributesFromRecord(recAttr, attr.Key))
sr.NoError(err)
av := helpers.MustUnmarshalJSON[any](enc)
if nil != av && nil != recAttr[attr.Key] &&
reflect.Slice == reflect.TypeOf(recAttr[attr.Key]).Kind() &&
reflect.Slice != reflect.TypeOf(av).Kind() {
found := false
allValues := recAttr[attr.Key].([]interface{})
for i := range allValues {
if av == allValues[i] {
fmt.Printf("Found %s in %s", allValues[i], recAttr[attr.Key])
found = true
}
}
sr.Equal(true, found, fmt.Sprintf("Unable to find %s in %s", av, recAttr[attr.Key]))
} else {
if attr.Key[:4] == "x500" {
sr.Equal(av, recAttr["x500"].(map[string]interface{})[attr.Key[4:]])
} else {
sr.Equal(av, recAttr[attr.Key])
}
}
} }
} }
} }
@ -307,6 +268,17 @@ func (suite *KeeperTestSuite) TestGrpcGetRecordLists() {
} }
} }
func getAttributesFromRecord(recordAttr map[string]interface{}, key string) interface{} {
keys := strings.Split(key, "---")
for i, str := range keys {
if i == len(keys)-2 {
return recordAttr[str].(string)
}
recordAttr = recordAttr[str].(map[string]interface{})
}
return nil
}
func (suite *KeeperTestSuite) TestGrpcQueryRegistryModuleBalance() { func (suite *KeeperTestSuite) TestGrpcQueryRegistryModuleBalance() {
grpcClient, ctx := suite.queryClient, suite.ctx grpcClient, ctx := suite.queryClient, suite.ctx
sr := suite.Require() sr := suite.Require()
@ -337,7 +309,8 @@ func (suite *KeeperTestSuite) TestGrpcQueryRegistryModuleBalance() {
for _, example := range examples { for _, example := range examples {
payloadType, err := cli.GetPayloadFromFile(fmt.Sprint(dir, example)) payloadType, err := cli.GetPayloadFromFile(fmt.Sprint(dir, example))
sr.NoError(err) sr.NoError(err)
payload := payloadType.ToPayload() payload, err := payloadType.ToPayload()
sr.NoError(err)
record, err := suite.app.RegistryKeeper.ProcessSetRecord(ctx, registrytypes.MsgSetRecord{ record, err := suite.app.RegistryKeeper.ProcessSetRecord(ctx, registrytypes.MsgSetRecord{
BondId: suite.bond.GetId(), BondId: suite.bond.GetId(),
Signer: suite.accounts[0].String(), Signer: suite.accounts[0].String(),
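The query keys used in these test cases ("type---", "version---", "x500---state_name---", "metadata---chain_reference---/---") follow the flattened attribute-index convention introduced in the keeper: nested attribute names are joined with "---" and the leaf key carries a trailing separator. A small illustration of the getAttributesFromRecord helper defined above, using the same x500 filter as the ServiceProviderRecord case; illustration only, not part of the change set:

recAttr := map[string]interface{}{
	"x500": map[string]interface{}{
		"state_name": "california",
	},
}
// strings.Split("x500---state_name---", "---") yields ["x500", "state_name", ""],
// so the helper descends one map level and returns the value found just before
// the trailing empty segment.
val := getAttributesFromRecord(recAttr, "x500---state_name---")
fmt.Println(val) // california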

View File

@ -4,10 +4,15 @@ import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"reflect" // #nosec G702 This is also used in eip712 antehandler code. No new risk introduced.
"sort" "sort"
"time" "time"
errorsmod "cosmossdk.io/errors" errorsmod "cosmossdk.io/errors"
auctionkeeper "github.com/cerc-io/laconicd/x/auction/keeper"
bondkeeper "github.com/cerc-io/laconicd/x/bond/keeper"
"github.com/cerc-io/laconicd/x/registry/helpers"
"github.com/cerc-io/laconicd/x/registry/types"
"github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/codec/legacy"
storetypes "github.com/cosmos/cosmos-sdk/store/types" storetypes "github.com/cosmos/cosmos-sdk/store/types"
@ -16,18 +21,6 @@ import (
auth "github.com/cosmos/cosmos-sdk/x/auth/keeper" auth "github.com/cosmos/cosmos-sdk/x/auth/keeper"
bank "github.com/cosmos/cosmos-sdk/x/bank/keeper" bank "github.com/cosmos/cosmos-sdk/x/bank/keeper"
paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" paramtypes "github.com/cosmos/cosmos-sdk/x/params/types"
"github.com/gibson042/canonicaljson-go"
cid "github.com/ipfs/go-cid"
"github.com/ipld/go-ipld-prime"
"github.com/ipld/go-ipld-prime/codec/dagjson"
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
basicnode "github.com/ipld/go-ipld-prime/node/basic"
"github.com/tendermint/tendermint/libs/log"
auctionkeeper "github.com/cerc-io/laconicd/x/auction/keeper"
bondkeeper "github.com/cerc-io/laconicd/x/bond/keeper"
"github.com/cerc-io/laconicd/x/registry/helpers"
"github.com/cerc-io/laconicd/x/registry/types"
) )
var ( var (
@ -104,15 +97,6 @@ func NewKeeper(cdc codec.BinaryCodec, accountKeeper auth.AccountKeeper, bankKeep
} }
} }
// Logger returns a module-specific logger.
func (k Keeper) Logger(ctx sdk.Context) log.Logger {
return logger(ctx)
}
func logger(ctx sdk.Context) log.Logger {
return ctx.Logger().With("module", types.ModuleName)
}
// GetRecordIndexKey Generates Bond ID -> Bond index key. // GetRecordIndexKey Generates Bond ID -> Bond index key.
func GetRecordIndexKey(id string) []byte { func GetRecordIndexKey(id string) []byte {
return append(PrefixCIDToRecordIndex, []byte(id)...) return append(PrefixCIDToRecordIndex, []byte(id)...)
@ -129,8 +113,7 @@ func (k Keeper) GetRecord(ctx sdk.Context, id string) (record types.Record) {
store := ctx.KVStore(k.storeKey) store := ctx.KVStore(k.storeKey)
result := store.Get(GetRecordIndexKey(id)) result := store.Get(GetRecordIndexKey(id))
k.cdc.MustUnmarshal(result, &record) k.cdc.MustUnmarshal(result, &record)
decodeRecordNames(store, &record) return recordObjToRecord(store, record)
return record
} }
// ListRecords - get all records. // ListRecords - get all records.
@ -143,25 +126,20 @@ func (k Keeper) ListRecords(ctx sdk.Context) []types.Record {
for ; itr.Valid(); itr.Next() { for ; itr.Valid(); itr.Next() {
bz := store.Get(itr.Key()) bz := store.Get(itr.Key())
if bz != nil { if bz != nil {
var record types.Record var obj types.Record
k.cdc.MustUnmarshal(bz, &record) k.cdc.MustUnmarshal(bz, &obj)
decodeRecordNames(store, &record) records = append(records, recordObjToRecord(store, obj))
records = append(records, record)
} }
} }
return records return records
} }
// RecordsFromAttributes gets a list of records whose attributes match all provided values
func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.QueryListRecordsRequest_KeyValueInput, all bool) ([]types.Record, error) { func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.QueryListRecordsRequest_KeyValueInput, all bool) ([]types.Record, error) {
resultRecordIds := []string{} resultRecordIds := []string{}
for i, attr := range attributes { for i, attr := range attributes {
suffix, err := QueryValueToJSON(attr.Value) val := GetAttributeValue(attr.Value)
if err != nil { attributeIndex := GetAttributesIndexKey(attr.Key, val)
return nil, err
}
attributeIndex := GetAttributesIndexKey(attr.Key, suffix)
recordIds, err := k.GetAttributeMapping(ctx, attributeIndex) recordIds, err := k.GetAttributeMapping(ctx, attributeIndex)
if err != nil { if err != nil {
return nil, err return nil, err
@ -180,62 +158,32 @@ func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.Query
continue continue
} }
store := ctx.KVStore(k.storeKey) store := ctx.KVStore(k.storeKey)
decodeRecordNames(store, &record) recordWithNames := recordObjToRecord(store, record)
if !all && len(record.Names) == 0 { if !all && len(recordWithNames.Names) == 0 {
continue continue
} }
records = append(records, record) records = append(records, recordWithNames)
} }
return records, nil return records, nil
} }
// TODO not recursive, and only should be if we want to support querying with whole sub-objects, func GetAttributeValue(input *types.QueryListRecordsRequest_ValueInput) interface{} {
// which seems unnecessary. if input.Type == "int" {
func QueryValueToJSON(input *types.QueryListRecordsRequest_ValueInput) ([]byte, error) { return input.GetInt()
np := basicnode.Prototype.Any
nb := np.NewBuilder()
switch value := input.GetValue().(type) {
case *types.QueryListRecordsRequest_ValueInput_String_:
err := nb.AssignString(value.String_)
if err != nil {
return nil, err
}
case *types.QueryListRecordsRequest_ValueInput_Int:
err := nb.AssignInt(value.Int)
if err != nil {
return nil, err
}
case *types.QueryListRecordsRequest_ValueInput_Float:
err := nb.AssignFloat(value.Float)
if err != nil {
return nil, err
}
case *types.QueryListRecordsRequest_ValueInput_Boolean:
err := nb.AssignBool(value.Boolean)
if err != nil {
return nil, err
}
case *types.QueryListRecordsRequest_ValueInput_Link:
link := cidlink.Link{Cid: cid.MustParse(value.Link)}
err := nb.AssignLink(link)
if err != nil {
return nil, err
}
case *types.QueryListRecordsRequest_ValueInput_Array:
return nil, fmt.Errorf("recursive query values are not supported")
case *types.QueryListRecordsRequest_ValueInput_Map:
return nil, fmt.Errorf("recursive query values are not supported")
default:
return nil, fmt.Errorf("value has unexpected type %T", value)
} }
if input.Type == "float" {
n := nb.Build() return input.GetFloat()
var buf bytes.Buffer
if err := dagjson.Encode(n, &buf); err != nil {
return nil, fmt.Errorf("encoding value to JSON failed: %w", err)
} }
return buf.Bytes(), nil if input.Type == "string" {
return input.GetString_()
}
if input.Type == "boolean" {
return input.GetBoolean()
}
if input.Type == "reference" {
return input.GetReference().GetId()
}
return nil
} }
func getIntersection(a []string, b []string) []string { func getIntersection(a []string, b []string) []string {
@ -286,9 +234,9 @@ func (k Keeper) GetRecordExpiryQueue(ctx sdk.Context) []*types.ExpiryQueueRecord
} }
// ProcessSetRecord creates a record. // ProcessSetRecord creates a record.
func (k Keeper) ProcessSetRecord(ctx sdk.Context, msg types.MsgSetRecord) (*types.ReadableRecord, error) { func (k Keeper) ProcessSetRecord(ctx sdk.Context, msg types.MsgSetRecord) (*types.RecordType, error) {
payload := msg.Payload.ToReadablePayload() payload := msg.Payload.ToReadablePayload()
record := types.ReadableRecord{Attributes: payload.RecordAttributes, BondID: msg.BondId} record := types.RecordType{Attributes: payload.Record, BondID: msg.BondId}
// Check signatures. // Check signatures.
resourceSignBytes, _ := record.GetSignBytes() resourceSignBytes, _ := record.GetSignBytes()
@ -308,12 +256,14 @@ func (k Keeper) ProcessSetRecord(ctx sdk.Context, msg types.MsgSetRecord) (*type
for _, sig := range payload.Signatures { for _, sig := range payload.Signatures {
pubKey, err := legacy.PubKeyFromBytes(helpers.BytesFromBase64(sig.PubKey)) pubKey, err := legacy.PubKeyFromBytes(helpers.BytesFromBase64(sig.PubKey))
if err != nil { if err != nil {
return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprint("Error decoding pubKey from bytes: ", err)) fmt.Println("Error decoding pubKey from bytes: ", err)
return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "Invalid public key.")
} }
sigOK := pubKey.VerifySignature(resourceSignBytes, helpers.BytesFromBase64(sig.Sig)) sigOK := pubKey.VerifySignature(resourceSignBytes, helpers.BytesFromBase64(sig.Sig))
if !sigOK { if !sigOK {
return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprint("Signature mismatch: ", sig.PubKey)) fmt.Println("Signature mismatch: ", sig.PubKey)
return nil, errorsmod.Wrap(sdkerrors.ErrUnauthorized, "Invalid signature.")
} }
record.Owners = append(record.Owners, pubKey.Address().String()) record.Owners = append(record.Owners, pubKey.Address().String())
} }
@ -327,13 +277,11 @@ func (k Keeper) ProcessSetRecord(ctx sdk.Context, msg types.MsgSetRecord) (*type
return &record, nil return &record, nil
} }
func (k Keeper) processRecord(ctx sdk.Context, record *types.ReadableRecord, isRenewal bool) error { func (k Keeper) processRecord(ctx sdk.Context, record *types.RecordType, isRenewal bool) error {
params := k.GetParams(ctx) params := k.GetParams(ctx)
rent := params.RecordRent rent := params.RecordRent
err := k.bondKeeper.TransferCoinsToModuleAccount( err := k.bondKeeper.TransferCoinsToModuleAccount(ctx, record.BondID, types.RecordRentModuleAccountName, sdk.NewCoins(rent))
ctx, record.BondID, types.RecordRentModuleAccountName, sdk.NewCoins(rent),
)
if err != nil { if err != nil {
return err return err
} }
@ -348,14 +296,7 @@ func (k Keeper) processRecord(ctx sdk.Context, record *types.ReadableRecord, isR
} }
k.PutRecord(ctx, recordObj) k.PutRecord(ctx, recordObj)
// TODO look up/validate record type here if err := k.ProcessAttributes(ctx, *record); err != nil {
if err := k.processAttributes(ctx, record.Attributes, record.ID, ""); err != nil {
return err
}
expiryTimeKey := GetAttributesIndexKey(ExpiryTimeAttributeName, []byte(record.ExpiryTime))
if err := k.SetAttributeMapping(ctx, expiryTimeKey, record.ID); err != nil {
return err return err
} }
@ -376,66 +317,45 @@ func (k Keeper) PutRecord(ctx sdk.Context, record types.Record) {
k.updateBlockChangeSetForRecord(ctx, record.Id) k.updateBlockChangeSetForRecord(ctx, record.Id)
} }
func (k Keeper) processAttributes(ctx sdk.Context, attrs types.AttributeMap, id string, prefix string) error { func (k Keeper) ProcessAttributes(ctx sdk.Context, record types.RecordType) error {
np := basicnode.Prototype.Map if err := k.insertAttributes(ctx, record.ID, record.Attributes, ""); err != nil {
nb := np.NewBuilder()
encAttrs, err := canonicaljson.Marshal(attrs)
if err != nil {
return err return err
} }
if len(attrs) == 0 {
encAttrs = []byte("{}") expiryTimeKey := GetAttributesIndexKey(ExpiryTimeAttributeName, record.ExpiryTime)
} if err := k.setAttributeMapping(ctx, expiryTimeKey, record.ID); err != nil {
err = dagjson.Decode(nb, bytes.NewReader(encAttrs)) return err
if err != nil {
return fmt.Errorf("failed to decode attributes: %w", err)
}
n := nb.Build()
if n.Kind() != ipld.Kind_Map {
return fmt.Errorf("record attributes must be a map, not %T", n.Kind())
} }
return k.processAttributeMap(ctx, n, id, prefix)
}
func (k Keeper) processAttributeMap(ctx sdk.Context, n ipld.Node, id string, prefix string) error {
for it := n.MapIterator(); !it.Done(); {
//nolint:misspell
keynode, valuenode, err := it.Next()
if err != nil {
return err
}
key, err := keynode.AsString()
if err != nil {
return err
}
if valuenode.Kind() == ipld.Kind_Map {
err := k.processAttributeMap(ctx, valuenode, id, key)
if err != nil {
return err
}
} else {
var buf bytes.Buffer
if err := dagjson.Encode(valuenode, &buf); err != nil {
return err
}
value := buf.Bytes()
indexKey := GetAttributesIndexKey(prefix+key, value)
if err := k.SetAttributeMapping(ctx, indexKey, id); err != nil {
return err
}
}
}
return nil return nil
} }
func GetAttributesIndexKey(key string, suffix []byte) []byte { func (k Keeper) insertAttributes(ctx sdk.Context, recordID string, attr interface{}, keyPrefix string) error {
keyString := fmt.Sprintf("%s=%s", key, suffix) if reflect.TypeOf(attr).Kind() == reflect.Map {
val := attr.(map[string]interface{})
// #nosec G705
for key := range val {
newKeyPrefix := fmt.Sprint(keyPrefix + key + "---")
if err := k.insertAttributes(ctx, recordID, val[key], newKeyPrefix); err != nil {
return err
}
}
} else {
indexKey := GetAttributesIndexKey(keyPrefix, attr)
if err := k.setAttributeMapping(ctx, indexKey, recordID); err != nil {
return err
}
}
return nil
}
func GetAttributesIndexKey(key string, value interface{}) []byte {
keyString := fmt.Sprintf("%s%s", key, value)
return append(PrefixAttributesIndex, []byte(keyString)...) return append(PrefixAttributesIndex, []byte(keyString)...)
} }
func (k Keeper) SetAttributeMapping(ctx sdk.Context, key []byte, recordID string) error { func (k Keeper) setAttributeMapping(ctx sdk.Context, key []byte, recordID string) error {
store := ctx.KVStore(k.storeKey) store := ctx.KVStore(k.storeKey)
var recordIds []string var recordIds []string
if store.Has(key) { if store.Has(key) {
@ -443,6 +363,8 @@ func (k Keeper) SetAttributeMapping(ctx sdk.Context, key []byte, recordID string
if err != nil { if err != nil {
return fmt.Errorf("cannot unmarshal byte array, error, %w", err) return fmt.Errorf("cannot unmarshal byte array, error, %w", err)
} }
} else {
recordIds = []string{}
} }
recordIds = append(recordIds, recordID) recordIds = append(recordIds, recordID)
bz, err := json.Marshal(recordIds) bz, err := json.Marshal(recordIds)
@ -457,13 +379,12 @@ func (k Keeper) GetAttributeMapping(ctx sdk.Context, key []byte) ([]string, erro
store := ctx.KVStore(k.storeKey) store := ctx.KVStore(k.storeKey)
if !store.Has(key) { if !store.Has(key) {
k.Logger(ctx).Debug(fmt.Sprintf("store doesn't have key: %q", key)) return nil, fmt.Errorf("store doesn't have key")
return []string{}, nil
} }
var recordIds []string var recordIds []string
if err := json.Unmarshal(store.Get(key), &recordIds); err != nil { if err := json.Unmarshal(store.Get(key), &recordIds); err != nil {
return nil, fmt.Errorf("cannot unmarshal byte array, error, %w", err) return nil, fmt.Errorf("cannont unmarshal byte array, error, %w", err)
} }
return recordIds, nil return recordIds, nil
@ -641,7 +562,7 @@ func (k Keeper) GetModuleBalances(ctx sdk.Context) []*types.AccountBalance {
return balances return balances
} }
func decodeRecordNames(store sdk.KVStore, record *types.Record) { func recordObjToRecord(store sdk.KVStore, record types.Record) types.Record {
reverseNameIndexKey := GetCIDToNamesIndexKey(record.Id) reverseNameIndexKey := GetCIDToNamesIndexKey(record.Id)
if store.Has(reverseNameIndexKey) { if store.Has(reverseNameIndexKey) {
@ -652,4 +573,6 @@ func decodeRecordNames(store sdk.KVStore, record *types.Record) {
record.Names = names record.Names = names
} }
return record
} }
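For reference, the index keys written by insertAttributes above come from walking the attribute map depth-first and appending "key---" at every map level; GetAttributesIndexKey then concatenates that flattened key with the stringified value under PrefixAttributesIndex, which is what RecordsFromAttributes looks up. A self-contained sketch of the flattening, illustration only and not code from this change:

package main

import "fmt"

// flattenAttributeKeys mirrors the key-prefix construction in insertAttributes:
// each map level appends "<key>---" to the prefix, and leaf values are recorded
// under the accumulated prefix.
func flattenAttributeKeys(attr interface{}, prefix string, out map[string]interface{}) {
	if m, ok := attr.(map[string]interface{}); ok {
		for k, v := range m {
			flattenAttributeKeys(v, prefix+k+"---", out)
		}
		return
	}
	out[prefix] = attr
}

func main() {
	attrs := map[string]interface{}{
		"version": "1.0.0",
		"metadata": map[string]interface{}{
			"chain_reference": map[string]interface{}{
				"/": "QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9",
			},
		},
	}
	index := map[string]interface{}{}
	flattenAttributeKeys(attrs, "", index)
	// Produces the keys "version---" and "metadata---chain_reference---/---",
	// matching the filters exercised in the gRPC query tests.
	fmt.Println(index)
}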

View File

@ -2,12 +2,9 @@ package keeper
import ( import (
"context" "context"
"fmt"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cerc-io/laconicd/utils"
"github.com/cerc-io/laconicd/x/registry/types" "github.com/cerc-io/laconicd/x/registry/types"
sdk "github.com/cosmos/cosmos-sdk/types"
) )
type msgServer struct { type msgServer struct {
@ -23,14 +20,16 @@ var _ types.MsgServer = msgServer{}
func (m msgServer) SetRecord(c context.Context, msg *types.MsgSetRecord) (*types.MsgSetRecordResponse, error) { func (m msgServer) SetRecord(c context.Context, msg *types.MsgSetRecord) (*types.MsgSetRecordResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
} }
record, err := m.Keeper.ProcessSetRecord(ctx, *msg) record, err := m.Keeper.ProcessSetRecord(ctx, types.MsgSetRecord{
BondId: msg.GetBondId(),
Signer: msg.GetSigner(),
Payload: msg.GetPayload(),
})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -49,15 +48,12 @@ func (m msgServer) SetRecord(c context.Context, msg *types.MsgSetRecord) (*types
), ),
}) })
m.logTxGasConsumed(ctx, "SetRecord")
return &types.MsgSetRecordResponse{Id: record.ID}, nil return &types.MsgSetRecordResponse{Id: record.ID}, nil
} }
//nolint: all
func (m msgServer) SetName(c context.Context, msg *types.MsgSetName) (*types.MsgSetNameResponse, error) { func (m msgServer) SetName(c context.Context, msg *types.MsgSetName) (*types.MsgSetNameResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -79,16 +75,11 @@ func (m msgServer) SetName(c context.Context, msg *types.MsgSetName) (*types.Msg
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "SetName")
return &types.MsgSetNameResponse{}, nil return &types.MsgSetNameResponse{}, nil
} }
func (m msgServer) ReserveName(c context.Context, msg *types.MsgReserveAuthority) (*types.MsgReserveAuthorityResponse, error) { func (m msgServer) ReserveName(c context.Context, msg *types.MsgReserveAuthority) (*types.MsgReserveAuthorityResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -114,16 +105,12 @@ func (m msgServer) ReserveName(c context.Context, msg *types.MsgReserveAuthority
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "ReserveName")
return &types.MsgReserveAuthorityResponse{}, nil return &types.MsgReserveAuthorityResponse{}, nil
} }
//nolint: all
func (m msgServer) SetAuthorityBond(c context.Context, msg *types.MsgSetAuthorityBond) (*types.MsgSetAuthorityBondResponse, error) { func (m msgServer) SetAuthorityBond(c context.Context, msg *types.MsgSetAuthorityBond) (*types.MsgSetAuthorityBondResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -145,16 +132,11 @@ func (m msgServer) SetAuthorityBond(c context.Context, msg *types.MsgSetAuthorit
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "SetAuthorityBond")
return &types.MsgSetAuthorityBondResponse{}, nil return &types.MsgSetAuthorityBondResponse{}, nil
} }
func (m msgServer) DeleteName(c context.Context, msg *types.MsgDeleteNameAuthority) (*types.MsgDeleteNameAuthorityResponse, error) { func (m msgServer) DeleteName(c context.Context, msg *types.MsgDeleteNameAuthority) (*types.MsgDeleteNameAuthorityResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -175,16 +157,11 @@ func (m msgServer) DeleteName(c context.Context, msg *types.MsgDeleteNameAuthori
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "DeleteName")
return &types.MsgDeleteNameAuthorityResponse{}, nil return &types.MsgDeleteNameAuthorityResponse{}, nil
} }
func (m msgServer) RenewRecord(c context.Context, msg *types.MsgRenewRecord) (*types.MsgRenewRecordResponse, error) { func (m msgServer) RenewRecord(c context.Context, msg *types.MsgRenewRecord) (*types.MsgRenewRecordResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -205,16 +182,12 @@ func (m msgServer) RenewRecord(c context.Context, msg *types.MsgRenewRecord) (*t
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "RenewRecord")
return &types.MsgRenewRecordResponse{}, nil return &types.MsgRenewRecordResponse{}, nil
} }
//nolint: all
func (m msgServer) AssociateBond(c context.Context, msg *types.MsgAssociateBond) (*types.MsgAssociateBondResponse, error) { func (m msgServer) AssociateBond(c context.Context, msg *types.MsgAssociateBond) (*types.MsgAssociateBondResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -237,16 +210,11 @@ func (m msgServer) AssociateBond(c context.Context, msg *types.MsgAssociateBond)
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "AssociateBond")
return &types.MsgAssociateBondResponse{}, nil return &types.MsgAssociateBondResponse{}, nil
} }
func (m msgServer) DissociateBond(c context.Context, msg *types.MsgDissociateBond) (*types.MsgDissociateBondResponse, error) { func (m msgServer) DissociateBond(c context.Context, msg *types.MsgDissociateBond) (*types.MsgDissociateBondResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -267,16 +235,11 @@ func (m msgServer) DissociateBond(c context.Context, msg *types.MsgDissociateBon
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "DissociateBond")
return &types.MsgDissociateBondResponse{}, nil return &types.MsgDissociateBondResponse{}, nil
} }
func (m msgServer) DissociateRecords(c context.Context, msg *types.MsgDissociateRecords) (*types.MsgDissociateRecordsResponse, error) { func (m msgServer) DissociateRecords(c context.Context, msg *types.MsgDissociateRecords) (*types.MsgDissociateRecordsResponse, error) {
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -297,16 +260,11 @@ func (m msgServer) DissociateRecords(c context.Context, msg *types.MsgDissociate
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "DissociateRecords")
return &types.MsgDissociateRecordsResponse{}, nil return &types.MsgDissociateRecordsResponse{}, nil
} }
func (m msgServer) ReAssociateRecords(c context.Context, msg *types.MsgReAssociateRecords) (*types.MsgReAssociateRecordsResponse, error) { //nolint: all func (m msgServer) ReAssociateRecords(c context.Context, msg *types.MsgReAssociateRecords) (*types.MsgReAssociateRecordsResponse, error) { //nolint: all
ctx := sdk.UnwrapSDKContext(c) ctx := sdk.UnwrapSDKContext(c)
ctx = *utils.CtxWithCustomKVGasConfig(&ctx)
_, err := sdk.AccAddressFromBech32(msg.Signer) _, err := sdk.AccAddressFromBech32(msg.Signer)
if err != nil { if err != nil {
return nil, err return nil, err
@ -328,13 +286,5 @@ func (m msgServer) ReAssociateRecords(c context.Context, msg *types.MsgReAssocia
sdk.NewAttribute(types.AttributeKeySigner, msg.Signer), sdk.NewAttribute(types.AttributeKeySigner, msg.Signer),
), ),
}) })
m.logTxGasConsumed(ctx, "ReAssociateRecords")
return &types.MsgReAssociateRecordsResponse{}, nil return &types.MsgReAssociateRecordsResponse{}, nil
} }
func (m msgServer) logTxGasConsumed(ctx sdk.Context, tx string) {
gasConsumed := ctx.GasMeter().GasConsumed()
m.Keeper.Logger(ctx).Info("tx executed", "method", tx, "gas_consumed", fmt.Sprintf("%d", gasConsumed))
}

View File

@ -611,7 +611,7 @@ func (k Keeper) ProcessAuthorityExpiryQueue(ctx sdk.Context) {
k.SetNameAuthority(ctx, name, &authority) k.SetNameAuthority(ctx, name, &authority)
k.DeleteAuthorityExpiryQueue(ctx, name, authority) k.DeleteAuthorityExpiryQueue(ctx, name, authority)
k.Logger(ctx).Info(fmt.Sprintf("Marking authority expired as no bond present: %s", name)) ctx.Logger().Info(fmt.Sprintf("Marking authority expired as no bond present: %s", name))
return return
} }
@ -672,7 +672,7 @@ func (k Keeper) AuthorityExpiryQueueIterator(ctx sdk.Context, endTime time.Time)
// TryTakeAuthorityRent tries to take rent from the authority bond. // TryTakeAuthorityRent tries to take rent from the authority bond.
func (k Keeper) TryTakeAuthorityRent(ctx sdk.Context, name string, authority types.NameAuthority) { func (k Keeper) TryTakeAuthorityRent(ctx sdk.Context, name string, authority types.NameAuthority) {
k.Logger(ctx).Info(fmt.Sprintf("Trying to take rent for authority: %s", name)) ctx.Logger().Info(fmt.Sprintf("Trying to take rent for authority: %s", name))
params := k.GetParams(ctx) params := k.GetParams(ctx)
rent := params.AuthorityRent rent := params.AuthorityRent
@ -684,7 +684,7 @@ func (k Keeper) TryTakeAuthorityRent(ctx sdk.Context, name string, authority typ
k.SetNameAuthority(ctx, name, &authority) k.SetNameAuthority(ctx, name, &authority)
k.DeleteAuthorityExpiryQueue(ctx, name, authority) k.DeleteAuthorityExpiryQueue(ctx, name, authority)
k.Logger(ctx).Info(fmt.Sprintf("Insufficient funds in owner account to pay authority rent, marking as expired: %s", name)) ctx.Logger().Info(fmt.Sprintf("Insufficient funds in owner account to pay authority rent, marking as expired: %s", name))
return return
} }
@ -699,7 +699,7 @@ func (k Keeper) TryTakeAuthorityRent(ctx sdk.Context, name string, authority typ
k.SetNameAuthority(ctx, name, &authority) k.SetNameAuthority(ctx, name, &authority)
k.AddBondToAuthorityIndexEntry(ctx, authority.BondId, name) k.AddBondToAuthorityIndexEntry(ctx, authority.BondId, name)
k.Logger(ctx).Info(fmt.Sprintf("Authority rent paid successfully: %s", name)) ctx.Logger().Info(fmt.Sprintf("Authority rent paid successfully: %s", name))
} }
// ListNameAuthorityRecords - get all name authority records. // ListNameAuthorityRecords - get all name authority records.

View File

@ -39,14 +39,14 @@ func (k RecordKeeper) OnAuctionWinnerSelected(ctx sdk.Context, auctionID string)
name := k.GetAuctionToAuthorityMapping(ctx, auctionID) name := k.GetAuctionToAuthorityMapping(ctx, auctionID)
if name == "" { if name == "" {
// We don't know about this auction, ignore. // We don't know about this auction, ignore.
logger(ctx).Info(fmt.Sprintf("Ignoring auction notification, name mapping not found: %s", auctionID)) ctx.Logger().Info(fmt.Sprintf("Ignoring auction notification, name mapping not found: %s", auctionID))
return return
} }
store := ctx.KVStore(k.storeKey) store := ctx.KVStore(k.storeKey)
if !HasNameAuthority(store, name) { if !HasNameAuthority(store, name) {
// We don't know about this authority, ignore. // We don't know about this authority, ignore.
logger(ctx).Info(fmt.Sprintf("Ignoring auction notification, authority not found: %s", auctionID)) ctx.Logger().Info(fmt.Sprintf("Ignoring auction notification, authority not found: %s", auctionID))
return return
} }
@ -71,12 +71,12 @@ func (k RecordKeeper) OnAuctionWinnerSelected(ctx sdk.Context, auctionID string)
// Can be used to check if names are older than the authority itself (stale names). // Can be used to check if names are older than the authority itself (stale names).
authority.Height = uint64(ctx.BlockHeight()) authority.Height = uint64(ctx.BlockHeight())
logger(ctx).Info(fmt.Sprintf("Winner selected, marking authority as active: %s", name)) ctx.Logger().Info(fmt.Sprintf("Winner selected, marking authority as active: %s", name))
} else { } else {
// Mark as expired. // Mark as expired.
authority.Status = types.AuthorityExpired authority.Status = types.AuthorityExpired
logger(ctx).Info(fmt.Sprintf("No winner, marking authority as expired: %s", name)) ctx.Logger().Info(fmt.Sprintf("No winner, marking authority as expired: %s", name))
} }
authority.AuctionId = "" authority.AuctionId = ""
@ -85,7 +85,7 @@ func (k RecordKeeper) OnAuctionWinnerSelected(ctx sdk.Context, auctionID string)
// Forget about this auction now, we no longer need it. // Forget about this auction now, we no longer need it.
removeAuctionToAuthorityMapping(store, auctionID) removeAuctionToAuthorityMapping(store, auctionID)
} else { } else {
logger(ctx).Info(fmt.Sprintf("Ignoring auction notification, status: %s", auctionObj.Status)) ctx.Logger().Info(fmt.Sprintf("Ignoring auction notification, status: %s", auctionObj.Status))
} }
} }
@ -147,10 +147,9 @@ func (k RecordKeeper) QueryRecordsByBond(ctx sdk.Context, bondID string) []types
cid := itr.Key()[len(bondIDPrefix):] cid := itr.Key()[len(bondIDPrefix):]
bz := store.Get(append(PrefixCIDToRecordIndex, cid...)) bz := store.Get(append(PrefixCIDToRecordIndex, cid...))
if bz != nil { if bz != nil {
var record types.Record var obj types.Record
k.cdc.MustUnmarshal(bz, &record) k.cdc.MustUnmarshal(bz, &obj)
decodeRecordNames(store, &record) records = append(records, recordObjToRecord(store, obj))
records = append(records, record)
} }
} }
@ -174,7 +173,7 @@ func (k Keeper) ProcessRenewRecord(ctx sdk.Context, msg types.MsgRenewRecord) er
return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "Renewal not required.") return errorsmod.Wrap(sdkerrors.ErrInvalidRequest, "Renewal not required.")
} }
recordType := record.ToReadableRecord() recordType := record.ToRecordType()
err = k.processRecord(ctx, &recordType, true) err = k.processRecord(ctx, &recordType, true)
if err != nil { if err != nil {
return err return err

View File

@ -0,0 +1,5 @@
package types
type Attributes interface {
GetType() string
}

x/registry/types/attributes.pb.go (generated file, 5240 lines): file diff suppressed because it is too large.

View File

@ -39,6 +39,59 @@ func RegisterInterfaces(registry types.InterfaceRegistry) {
&MsgReAssociateRecords{}, &MsgReAssociateRecords{},
) )
registry.RegisterInterface(
"vulcanize.registry.v1beta1.ServiceProviderRecord",
(*Attributes)(nil),
&ServiceProviderRecord{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.WebsiteRegistrationRecord",
(*Attributes)(nil),
&WebsiteRegistrationRecord{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.GitRepository",
(*Attributes)(nil),
&GitRepository{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.Binary",
(*Attributes)(nil),
&Binary{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.DockerImage",
(*Attributes)(nil),
&DockerImage{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.WatcherRegistrationRecord",
(*Attributes)(nil),
&WatcherRegistrationRecord{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.ResponderContract",
(*Attributes)(nil),
&ResponderContract{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.JSPackage",
(*Attributes)(nil),
&JSPackage{},
)
registry.RegisterInterface(
"vulcanize.registry.v1beta1.ChainRegistrationRecord",
(*Attributes)(nil),
&ChainRegistrationRecord{},
)
msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc)
} }
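Each record type registered above is a concrete protobuf message satisfying the new Attributes interface (declared earlier in this diff in attributes.go, which only requires GetType() string). A hedged sketch of wrapping one of them into the Any that Record.Attributes now carries; cdctypes is github.com/cosmos/cosmos-sdk/codec/types, and the record variable plus field names/values are assumed for illustration:

attrs := &registrytypes.WebsiteRegistrationRecord{Type: "WebsiteRegistrationRecord", Version: "1.0.0"}
// NewAnyWithValue packs the concrete message and caches it on the Any.
anyAttr, err := cdctypes.NewAnyWithValue(attrs)
if err != nil {
	panic(err) // sketch only; handle the error in real code
}
record.Attributes = anyAttr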

View File

@ -16,8 +16,8 @@ var (
) )
// NewMsgSetName is the constructor function for MsgSetName. // NewMsgSetName is the constructor function for MsgSetName.
func NewMsgSetName(crn string, cid string, signer sdk.AccAddress) *MsgSetName { func NewMsgSetName(crn string, cid string, signer sdk.AccAddress) MsgSetName {
return &MsgSetName{ return MsgSetName{
Crn: crn, Crn: crn,
Cid: cid, Cid: cid,
Signer: signer.String(), Signer: signer.String(),

File diff suppressed because it is too large.

View File

@ -2,6 +2,7 @@ package types
import ( import (
errorsmod "cosmossdk.io/errors" errorsmod "cosmossdk.io/errors"
cdctypes "github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types" sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
) )
@ -13,11 +14,13 @@ var (
_ sdk.Msg = &MsgDissociateBond{} _ sdk.Msg = &MsgDissociateBond{}
_ sdk.Msg = &MsgDissociateRecords{} _ sdk.Msg = &MsgDissociateRecords{}
_ sdk.Msg = &MsgReAssociateRecords{} _ sdk.Msg = &MsgReAssociateRecords{}
_ cdctypes.UnpackInterfacesMessage = &MsgSetRecord{}
) )
// NewMsgSetRecord is the constructor function for MsgSetRecord. // NewMsgSetRecord is the constructor function for MsgSetRecord.
func NewMsgSetRecord(payload Payload, bondID string, signer sdk.AccAddress) *MsgSetRecord { func NewMsgSetRecord(payload Payload, bondID string, signer sdk.AccAddress) MsgSetRecord {
return &MsgSetRecord{ return MsgSetRecord{
Payload: payload, Payload: payload,
BondId: bondID, BondId: bondID,
Signer: signer.String(), Signer: signer.String(),
@ -58,6 +61,12 @@ func (msg MsgSetRecord) GetSignBytes() []byte {
return sdk.MustSortJSON(bz) return sdk.MustSortJSON(bz)
} }
// UnpackInterfaces implements UnpackInterfacesMessage.UnpackInterfaces
func (msg MsgSetRecord) UnpackInterfaces(unpacker cdctypes.AnyUnpacker) error {
var attr Attributes
return unpacker.UnpackAny(msg.Payload.Record.Attributes, &attr)
}
// NewMsgRenewRecord is the constructor function for MsgRenewRecord. // NewMsgRenewRecord is the constructor function for MsgRenewRecord.
func NewMsgRenewRecord(recordID string, signer sdk.AccAddress) MsgRenewRecord { func NewMsgRenewRecord(recordID string, signer sdk.AccAddress) MsgRenewRecord {
return MsgRenewRecord{ return MsgRenewRecord{
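UnpackInterfaces is needed because Payload.Record.Attributes is now an Any: after decoding a MsgSetRecord, the codec only populates the Any's cached concrete value when the message exposes it through UnpackInterfacesMessage, as done above. A rough read-side sketch, assuming a cdc that implements AnyUnpacker (the SDK ProtoCodec does):

var attr registrytypes.Attributes
if err := cdc.UnpackAny(msg.Payload.Record.Attributes, &attr); err != nil {
	panic(err) // sketch only; handle the error in real code
}
// attr now holds the concrete record type registered in the interface registry.
fmt.Println(attr.GetType()) // e.g. "WebsiteRegistrationRecord"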

View File

@ -5,7 +5,7 @@ package types
import ( import (
fmt "fmt" fmt "fmt"
_ "github.com/cosmos/cosmos-sdk/codec/types" types1 "github.com/cosmos/cosmos-sdk/codec/types"
types "github.com/cosmos/cosmos-sdk/types" types "github.com/cosmos/cosmos-sdk/types"
_ "github.com/cosmos/gogoproto/gogoproto" _ "github.com/cosmos/gogoproto/gogoproto"
proto "github.com/gogo/protobuf/proto" proto "github.com/gogo/protobuf/proto"
@ -155,17 +155,17 @@ func (m *Params) GetAuthorityAuctionMinimumBid() types.Coin {
return types.Coin{} return types.Coin{}
} }
// Record defines a registry record // Params defines the registry module records
type Record struct { type Record struct {
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" json:"id" yaml:"id"` Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" json:"id" yaml:"id"`
BondId string `protobuf:"bytes,2,opt,name=bond_id,json=bondId,proto3" json:"bond_id,omitempty" json:"bondId" yaml:"bondId"` BondId string `protobuf:"bytes,2,opt,name=bond_id,json=bondId,proto3" json:"bond_id,omitempty" json:"bondId" yaml:"bondId"`
CreateTime string `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty" json:"createTime" yaml:"createTime"` CreateTime string `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty" json:"createTime" yaml:"createTime"`
ExpiryTime string `protobuf:"bytes,4,opt,name=expiry_time,json=expiryTime,proto3" json:"expiry_time,omitempty" json:"expiryTime" yaml:"expiryTime"` ExpiryTime string `protobuf:"bytes,4,opt,name=expiry_time,json=expiryTime,proto3" json:"expiry_time,omitempty" json:"expiryTime" yaml:"expiryTime"`
Deleted bool `protobuf:"varint,5,opt,name=deleted,proto3" json:"deleted,omitempty"` Deleted bool `protobuf:"varint,5,opt,name=deleted,proto3" json:"deleted,omitempty"`
Owners []string `protobuf:"bytes,6,rep,name=owners,proto3" json:"owners,omitempty" json:"owners" yaml:"owners"` Owners []string `protobuf:"bytes,6,rep,name=owners,proto3" json:"owners,omitempty" json:"owners" yaml:"owners"`
Attributes []byte `protobuf:"bytes,7,opt,name=attributes,proto3" json:"attributes,omitempty" json:"attributes" yaml:"attributes"` Attributes *types1.Any `protobuf:"bytes,7,opt,name=attributes,proto3" json:"attributes,omitempty" json:"attributes" yaml:"attributes"`
Names []string `protobuf:"bytes,8,rep,name=names,proto3" json:"names,omitempty" json:"names" yaml:"names"` Names []string `protobuf:"bytes,8,rep,name=names,proto3" json:"names,omitempty" json:"names" yaml:"names"`
Type string `protobuf:"bytes,9,opt,name=type,proto3" json:"type,omitempty" json:"types" yaml:"types"` Type string `protobuf:"bytes,9,opt,name=type,proto3" json:"type,omitempty" json:"types" yaml:"types"`
} }
func (m *Record) Reset() { *m = Record{} } func (m *Record) Reset() { *m = Record{} }
@ -243,7 +243,7 @@ func (m *Record) GetOwners() []string {
return nil return nil
} }
func (m *Record) GetAttributes() []byte { func (m *Record) GetAttributes() *types1.Any {
if m != nil { if m != nil {
return m.Attributes return m.Attributes
} }
@ -264,7 +264,7 @@ func (m *Record) GetType() string {
return "" return ""
} }
// AuthorityEntry defines a registry authority // AuthorityEntry defines the registry module AuthorityEntries
type AuthorityEntry struct { type AuthorityEntry struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Entry *NameAuthority `protobuf:"bytes,2,opt,name=entry,proto3" json:"entry,omitempty"` Entry *NameAuthority `protobuf:"bytes,2,opt,name=entry,proto3" json:"entry,omitempty"`
@ -466,7 +466,7 @@ func (m *NameEntry) GetEntry() *NameRecord {
return nil return nil
} }
// NameRecord defines a versioned name record // NameRecord
type NameRecord struct { type NameRecord struct {
Latest *NameRecordEntry `protobuf:"bytes,1,opt,name=latest,proto3" json:"latest,omitempty"` Latest *NameRecordEntry `protobuf:"bytes,1,opt,name=latest,proto3" json:"latest,omitempty"`
History []*NameRecordEntry `protobuf:"bytes,2,rep,name=history,proto3" json:"history,omitempty"` History []*NameRecordEntry `protobuf:"bytes,2,rep,name=history,proto3" json:"history,omitempty"`
@ -781,91 +781,92 @@ func init() {
} }
var fileDescriptor_5ca0f65a0e7121be = []byte{ var fileDescriptor_5ca0f65a0e7121be = []byte{
// 1344 bytes of a gzipped FileDescriptorProto // 1347 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0xdd, 0x6e, 0x1b, 0xc5, 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0xdd, 0x6e, 0x1b, 0x45,
0x17, 0xcf, 0xc6, 0x89, 0x13, 0x9f, 0x34, 0xf9, 0xff, 0x35, 0xa4, 0xad, 0x13, 0xa8, 0x37, 0x18, 0x14, 0xce, 0xc6, 0x89, 0x13, 0x9f, 0x34, 0x01, 0x0d, 0x69, 0xeb, 0x04, 0xea, 0x0d, 0x46, 0xa5,
0x95, 0x36, 0x84, 0xda, 0x2a, 0xbd, 0x28, 0x9f, 0x42, 0xd9, 0x24, 0x0d, 0x11, 0x02, 0xc2, 0xb4, 0x0d, 0xa1, 0xb6, 0x4a, 0x2f, 0xca, 0xaf, 0x50, 0x36, 0x49, 0x4b, 0x84, 0x80, 0x30, 0xed, 0x0d,
0x37, 0x20, 0x21, 0x6b, 0x76, 0x77, 0x6a, 0x0f, 0xf5, 0xee, 0x5a, 0xbb, 0xb3, 0xa5, 0xe6, 0x0e, 0x48, 0x95, 0x35, 0xbb, 0x3b, 0xb5, 0x87, 0x7a, 0x77, 0xad, 0xdd, 0xd9, 0x52, 0x73, 0xc7, 0x1b,
0xf1, 0x02, 0xb9, 0xec, 0x05, 0x6f, 0x00, 0x12, 0x8f, 0x41, 0x2f, 0x7b, 0x89, 0x90, 0x30, 0xa8, 0xe4, 0xb2, 0x48, 0xbc, 0x01, 0x48, 0x3c, 0x06, 0xbd, 0xec, 0x25, 0x42, 0xc2, 0xa0, 0xe6, 0x0d,
0x79, 0x03, 0x3f, 0x01, 0xda, 0xf9, 0xd8, 0x2f, 0xdb, 0x75, 0xa1, 0x77, 0x73, 0xbe, 0x7e, 0xf3, 0xfc, 0x04, 0x68, 0xe7, 0x67, 0xff, 0x6c, 0xd7, 0x85, 0xde, 0xcd, 0xf9, 0xfb, 0xe6, 0x9b, 0x33,
0x9b, 0x33, 0xe7, 0x9c, 0xd9, 0x85, 0xdd, 0x87, 0x71, 0xdf, 0x21, 0x3e, 0xfb, 0x9e, 0xb6, 0x43, 0xe7, 0x9c, 0xd9, 0x85, 0xdd, 0x87, 0x71, 0xdf, 0x21, 0x3e, 0xfb, 0x81, 0xb6, 0x43, 0xda, 0x65,
0xda, 0x65, 0x11, 0x0f, 0x87, 0xed, 0x87, 0x37, 0x6d, 0xca, 0xc9, 0xcd, 0x54, 0xd1, 0x1a, 0x84, 0x11, 0x0f, 0x87, 0xed, 0x87, 0xd7, 0x6d, 0xca, 0xc9, 0xf5, 0x54, 0xd1, 0x1a, 0x84, 0x01, 0x0f,
0x01, 0x0f, 0xd0, 0x76, 0xea, 0xda, 0x4a, 0x2d, 0xca, 0x75, 0xbb, 0xd1, 0x0d, 0x82, 0x6e, 0x9f, 0xd0, 0x76, 0xea, 0xda, 0x4a, 0x2d, 0xca, 0x75, 0xbb, 0xd1, 0x0d, 0x82, 0x6e, 0x9f, 0xb6, 0x85,
0xb6, 0x85, 0xa7, 0x1d, 0xdf, 0x6f, 0xbb, 0x71, 0x48, 0x38, 0x0b, 0x7c, 0x19, 0xbb, 0x6d, 0x96, 0xa7, 0x1d, 0xdf, 0x6f, 0xbb, 0x71, 0x48, 0x38, 0x0b, 0x7c, 0x19, 0xbb, 0x6d, 0x96, 0xed, 0x9c,
0xed, 0x9c, 0x79, 0x34, 0xe2, 0xc4, 0x1b, 0x28, 0x87, 0xcd, 0x6e, 0xd0, 0x0d, 0xc4, 0xb2, 0x9d, 0x79, 0x34, 0xe2, 0xc4, 0x1b, 0x28, 0x87, 0xcd, 0x6e, 0xd0, 0x0d, 0xc4, 0xb2, 0x9d, 0xac, 0x94,
0xac, 0x94, 0xb6, 0xe1, 0x04, 0x91, 0x17, 0x44, 0x6d, 0x9b, 0x44, 0x34, 0xa5, 0xe5, 0x04, 0x4c, 0xb6, 0xe1, 0x04, 0x91, 0x17, 0x44, 0x6d, 0x9b, 0x44, 0x34, 0xa5, 0xe5, 0x04, 0x4c, 0xc3, 0x6e,
0xc3, 0x6e, 0x95, 0x61, 0x89, 0xaf, 0xd8, 0x36, 0xff, 0x5c, 0x87, 0xea, 0x29, 0x09, 0x89, 0x17, 0x95, 0x61, 0x89, 0xaf, 0xd8, 0x36, 0xff, 0x5a, 0x87, 0xea, 0x09, 0x09, 0x89, 0x17, 0x21, 0x06,
0x21, 0x06, 0x6b, 0x21, 0x75, 0x82, 0xd0, 0xed, 0x84, 0xd4, 0xe7, 0x75, 0x63, 0xc7, 0xb8, 0xbe, 0x6b, 0x21, 0x75, 0x82, 0xd0, 0xed, 0x84, 0xd4, 0xe7, 0x75, 0x63, 0xc7, 0xb8, 0xba, 0xf6, 0xde,
0xf6, 0xce, 0x56, 0x4b, 0x62, 0xb7, 0x12, 0x6c, 0x7d, 0x8e, 0xd6, 0x41, 0xc0, 0x7c, 0xeb, 0xc6, 0x56, 0x4b, 0x62, 0xb7, 0x12, 0x6c, 0x7d, 0x8e, 0xd6, 0x41, 0xc0, 0x7c, 0xeb, 0xda, 0x93, 0x91,
0x93, 0x91, 0xb9, 0x30, 0x1e, 0x99, 0x57, 0xbf, 0x8d, 0x02, 0xff, 0xfd, 0x66, 0x2e, 0xb6, 0xb9, 0xb9, 0x30, 0x1e, 0x99, 0x97, 0xbf, 0x8b, 0x02, 0xff, 0xc3, 0x66, 0x2e, 0xb6, 0xb9, 0x33, 0x24,
0x33, 0x24, 0x5e, 0xbf, 0xa8, 0xc2, 0x20, 0x25, 0x4c, 0x7d, 0x8e, 0xce, 0x0c, 0xd8, 0xcc, 0x19, 0x5e, 0xbf, 0xa8, 0xc2, 0x20, 0x25, 0x4c, 0x7d, 0x8e, 0x4e, 0x0d, 0xd8, 0xcc, 0x19, 0x3b, 0x3a,
0x3b, 0x3a, 0x0d, 0xf5, 0x45, 0xb5, 0xa9, 0x24, 0xdc, 0xd2, 0x84, 0x5b, 0x87, 0xca, 0xc1, 0x3a, 0x0d, 0xf5, 0x45, 0xb5, 0xa9, 0x24, 0xdc, 0xd2, 0x84, 0x5b, 0x87, 0xca, 0xc1, 0x3a, 0x50, 0x9b,
0x50, 0x9b, 0xde, 0x9e, 0xd8, 0x34, 0x05, 0x99, 0xb2, 0x7b, 0x66, 0x7b, 0xfc, 0x97, 0x69, 0x60, 0xde, 0x9c, 0xd8, 0x34, 0x05, 0x99, 0xb2, 0x7b, 0x66, 0x7b, 0xfc, 0xb7, 0x69, 0x60, 0x94, 0x51,
0x94, 0x51, 0xd1, 0xc0, 0x28, 0x86, 0x0d, 0x12, 0xf3, 0x5e, 0x10, 0x32, 0x3e, 0x94, 0x09, 0xa8, 0xd1, 0xc0, 0x28, 0x86, 0x0d, 0x12, 0xf3, 0x5e, 0x10, 0x32, 0x3e, 0x94, 0x09, 0xa8, 0xcc, 0x4b,
	// … (remaining fileDescriptor bytes omitted: the regenerated descriptor differs from the old one, but the raw gzipped bytes are not human-readable) …
}

func (m *Params) Marshal() (dAtA []byte, err error) {
@@ -1027,10 +1028,15 @@ func (m *Record) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 			dAtA[i] = 0x42
 		}
 	}
-	if len(m.Attributes) > 0 {
-		i -= len(m.Attributes)
-		copy(dAtA[i:], m.Attributes)
-		i = encodeVarintRegistry(dAtA, i, uint64(len(m.Attributes)))
+	if m.Attributes != nil {
+		{
+			size, err := m.Attributes.MarshalToSizedBuffer(dAtA[:i])
+			if err != nil {
+				return 0, err
+			}
+			i -= size
+			i = encodeVarintRegistry(dAtA, i, uint64(size))
+		}
 		i--
 		dAtA[i] = 0x3a
 	}
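The regenerated marshaler above treats Attributes as a nested protobuf message (types1.Any) instead of a raw byte slice, so a concrete attributes payload now has to be wrapped in an Any before it is set on a Record. A minimal sketch of that packing, assuming the Cosmos SDK codec/types helpers and a hypothetical MyRecordAttributes message (neither name comes from this diff):

package example

import (
	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
)

// packAttributes wraps a concrete attributes message in an Any; the Any carries the
// payload bytes plus a type URL, which is what the regenerated Attributes field now
// marshals as a length-delimited nested message (tag 0x3a above).
func packAttributes(attrs *MyRecordAttributes) (*codectypes.Any, error) {
	return codectypes.NewAnyWithValue(attrs)
}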
@@ -1146,12 +1152,12 @@ func (m *NameAuthority) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 	_ = i
 	var l int
 	_ = l
-	n12, err12 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.ExpiryTime, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.ExpiryTime):])
-	if err12 != nil {
-		return 0, err12
+	n13, err13 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.ExpiryTime, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.ExpiryTime):])
+	if err13 != nil {
+		return 0, err13
 	}
-	i -= n12
-	i = encodeVarintRegistry(dAtA, i, uint64(n12))
+	i -= n13
+	i = encodeVarintRegistry(dAtA, i, uint64(n13))
 	i--
 	dAtA[i] = 0x3a
 	if len(m.BondId) > 0 {
@@ -1549,8 +1555,8 @@ func (m *Record) Size() (n int) {
 			n += 1 + l + sovRegistry(uint64(l))
 		}
 	}
-	l = len(m.Attributes)
-	if l > 0 {
+	if m.Attributes != nil {
+		l = m.Attributes.Size()
 		n += 1 + l + sovRegistry(uint64(l))
 	}
 	if len(m.Names) > 0 {
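In both the old and new Size branches a length-delimited field costs one tag byte, plus the varint encoding of the payload length, plus the payload itself; the change only swaps where that length comes from (the byte slice's len versus the nested message's Size). A small stand-alone illustration of the arithmetic, with sov written here as an assumed equivalent of the generated sovRegistry helper:

// sov returns how many bytes a value occupies as a protobuf varint.
func sov(x uint64) int {
	n := 1
	for x >= 0x80 {
		x >>= 7
		n++
	}
	return n
}

// fieldSize mirrors the generated "n += 1 + l + sovRegistry(uint64(l))" arithmetic:
// one tag byte, the varint-encoded length, then the payload bytes themselves.
func fieldSize(payloadLen int) int {
	return 1 + payloadLen + sov(uint64(payloadLen))
}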
@@ -2364,7 +2370,7 @@ func (m *Record) Unmarshal(dAtA []byte) error {
 			if wireType != 2 {
 				return fmt.Errorf("proto: wrong wireType = %d for field Attributes", wireType)
 			}
-			var byteLen int
+			var msglen int
 			for shift := uint(0); ; shift += 7 {
 				if shift >= 64 {
 					return ErrIntOverflowRegistry
@@ -2374,24 +2380,26 @@ func (m *Record) Unmarshal(dAtA []byte) error {
 				}
 				b := dAtA[iNdEx]
 				iNdEx++
-				byteLen |= int(b&0x7F) << shift
+				msglen |= int(b&0x7F) << shift
 				if b < 0x80 {
 					break
 				}
 			}
-			if byteLen < 0 {
+			if msglen < 0 {
 				return ErrInvalidLengthRegistry
 			}
-			postIndex := iNdEx + byteLen
+			postIndex := iNdEx + msglen
 			if postIndex < 0 {
 				return ErrInvalidLengthRegistry
 			}
 			if postIndex > l {
 				return io.ErrUnexpectedEOF
 			}
-			m.Attributes = append(m.Attributes[:0], dAtA[iNdEx:postIndex]...)
 			if m.Attributes == nil {
-				m.Attributes = []byte{}
+				m.Attributes = &types1.Any{}
+			}
+			if err := m.Attributes.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
+				return err
 			}
 			iNdEx = postIndex
 		case 8:
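After Unmarshal, Attributes is a types1.Any holding the nested payload bytes and a type URL; turning it back into a concrete attributes message is left to the caller. A minimal sketch of that unpacking, assuming the Cosmos SDK codec/types interface registry and gogo proto (the helper and the registration of the concrete attribute types are assumptions, not part of this diff):

import (
	codectypes "github.com/cosmos/cosmos-sdk/codec/types"
	"github.com/gogo/protobuf/proto"
)

// unpackAttributes resolves the concrete message type from the Any's type URL and then
// unmarshals the payload bytes that Record.Unmarshal stored in rec.Attributes.
func unpackAttributes(registry codectypes.InterfaceRegistry, rec *Record) (proto.Message, error) {
	msg, err := registry.Resolve(rec.Attributes.TypeUrl)
	if err != nil {
		return nil, err
	}
	if err := proto.Unmarshal(rec.Attributes.Value, msg); err != nil {
		return nil, err
	}
	return msg, nil
}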
Some files were not shown because too many files have changed in this diff.