diff --git a/.gitignore b/.gitignore index 4321fb8d05..c97bf48dda 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ examples/build/* examples/basecoin/glide.lock examples/basecoin/app/data baseapp/data/* +docs/_build ### Vagrant ### .vagrant/ diff --git a/Dockerfile.dev b/Dockerfile.dev deleted file mode 100644 index c3b1058987..0000000000 --- a/Dockerfile.dev +++ /dev/null @@ -1,14 +0,0 @@ -FROM golang:latest - -RUN apt-get update && apt-get install -y jq - -RUN mkdir -p /go/src/github.com/tendermint/basecoin -WORKDIR /go/src/github.com/tendermint/basecoin - -COPY Makefile /go/src/github.com/tendermint/basecoin/ -COPY glide.yaml /go/src/github.com/tendermint/basecoin/ -COPY glide.lock /go/src/github.com/tendermint/basecoin/ - -RUN make get_vendor_deps - -COPY . /go/src/github.com/tendermint/basecoin diff --git a/Makefile b/Makefile index a238e7e7c6..21f4a2f138 100644 --- a/Makefile +++ b/Makefile @@ -68,13 +68,7 @@ test_unit: @go test $(PACKAGES) test_cover: - @bash test_cover.sh - -test_tutorial: - @shelldown ${TUTORIALS} - @for script in docs/guide/*.sh ; do \ - bash $$script ; \ - done + @bash tests/test_cover.sh benchmark: @go test -bench=. $(PACKAGES) diff --git a/RELEASE.md b/RELEASE.md deleted file mode 100644 index f45008261d..0000000000 --- a/RELEASE.md +++ /dev/null @@ -1,56 +0,0 @@ -# Release Process - -Basecoin is the heart of most demo apps and the testnets, but the last few releases have been a little chaotic. In order to guarantee a higher, production-quality release in the future, we will work on a release process to check before the push to master. This is a work-in-progress and should be trialed on the 0.6.x patches, and used for the 0.7.0 release. - -This is a rough-guide. Please add comments here, let's try it out for 0.6.1 and see what is annoying and useless, and what is missing and useful. 
- -## Planning - -* Create issues (and invite others to do so) -* Create WIP PR for release as placeholder - * Clarify scope of release in text -* Create labels, eg. (0.6.1 and 0.6.x) -* Tag all issues for this release with 0.6.1 - * Other, less urgent enhancements should get the 0.6.x label - -## Coding - -* Freeze tagging more issues for this release - * Update PR to note this - * If you want an exception, you need a good excuse ;) -* Handle all issues - * Write code - * Update CHANGELOG - * Review and merge -* Update version -* Remove WIP flag on PR -* Organize QA -* Prepare blog post (optional for patch/bugfix releases?) - -## QA - -Once we have a PR for the release and think it is ready, we should test it out internally: - -* Code review - * Hopefully dealt with by individual code reviews on the merged issues - * A general run-through is always good to find dead-code, things to cleanup -* Review blog post (and run-through) -* Manual run-through of tutorials (and feedback on bad UX) -* Deployment of a private testnet, multiple users test out manually (feedback on bugs, or annoying UX) -* Test out upgrading existing testnet from last version, document or add tools for easier upgrade. -* If problems arrise here: - * Create bugfix issues - * Fix them - * Repeat QA - -## Release - -Once QA passes, we need to orchestrate the release. - -* Merge to master -* Set all glide dependencies to proper master versions of repos -* Push code with new version tag -* Link CHANGELOG to the [github release](https://github.com/tendermint/basecoin/releases) -* Package up new version as binaries (and upload to s3) -* Upgrade our public-facing testnets with the latest versions -* Release blog post diff --git a/TODO b/TODO deleted file mode 100644 index 427a299698..0000000000 --- a/TODO +++ /dev/null @@ -1,9 +0,0 @@ - -* global state dumper. - for developer to list accounts, etc. - e.g. what does the world look like? - cmd cli. - -* something that can list transactions ... 
- make all decorators actually use the logger - so you can see all the txs and see what's going on diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000..f4bccf3bd3 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = python -msphinx +SPHINXPROJ = Cosmos-SDK +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/basecoin/basics.rst b/docs/basecoin/basics.rst new file mode 100644 index 0000000000..d3627b2b10 --- /dev/null +++ b/docs/basecoin/basics.rst @@ -0,0 +1,289 @@ +Basecoin Basics +=============== + +Here we explain how to get started with a basic Basecoin blockchain, how +to send transactions between accounts using the ``basecoin`` tool, and +what is happening under the hood. + +Install +------- + +With go, it's one command: + +:: + + go get -u github.com/cosmos/cosmos-sdk + +If you have trouble, see the `installation guide <./install.html>`__. + +TODO: update all the below + +Generate some keys +~~~~~~~~~~~~~~~~~~ + +Let's generate two keys, one to receive an initial allocation of coins, +and one to send some coins to later: + +:: + + basecli keys new cool + basecli keys new friend + +You'll need to enter passwords. You can view your key names and +addresses with ``basecli keys list``, or see a particular key's address +with ``basecli keys get ``. + +Initialize Basecoin +------------------- + +To initialize a new Basecoin blockchain, run: + +:: + + basecoin init
+ +If you prefer not to copy-paste, you can provide the address +programatically: + +:: + + basecoin init $(basecli keys get cool | awk '{print $2}') + +This will create the necessary files for a Basecoin blockchain with one +validator and one account (corresponding to your key) in +``~/.basecoin``. For more options on setup, see the `guide to using the +Basecoin tool `__. + +If you like, you can manually add some more accounts to the blockchain +by generating keys and editing the ``~/.basecoin/genesis.json``. + +Start Basecoin +~~~~~~~~~~~~~~ + +Now we can start Basecoin: + +:: + + basecoin start + +You should see blocks start streaming in! + +Initialize Light-Client +----------------------- + +Now that Basecoin is running we can initialize ``basecli``, the +light-client utility. Basecli is used for sending transactions and +querying the state. Leave Basecoin running and open a new terminal +window. Here run: + +:: + + basecli init --node=tcp://localhost:46657 --genesis=$HOME/.basecoin/genesis.json + +If you provide the genesis file to basecli, it can calculate the proper +chainID and validator hash. Basecli needs to get this information from +some trusted source, so all queries done with ``basecli`` can be +cryptographically proven to be correct according to a known validator +set. + +Note: that ``--genesis`` only works if there have been no validator set +changes since genesis. If there are validator set changes, you need to +find the current set through some other method. + +Send transactions +~~~~~~~~~~~~~~~~~ + +Now we are ready to send some transactions. First Let's check the +balance of the two accounts we setup earlier: + +:: + + ME=$(basecli keys get cool | awk '{print $2}') + YOU=$(basecli keys get friend | awk '{print $2}') + basecli query account $ME + basecli query account $YOU + +The first account is flush with cash, while the second account doesn't +exist. 
Let's send funds from the first account to the second: + +:: + + basecli tx send --name=cool --amount=1000mycoin --to=$YOU --sequence=1 + +Now if we check the second account, it should have ``1000`` 'mycoin' +coins! + +:: + + basecli query account $YOU + +We can send some of these coins back like so: + +:: + + basecli tx send --name=friend --amount=500mycoin --to=$ME --sequence=1 + +Note how we use the ``--name`` flag to select a different account to +send from. + +If we try to send too much, we'll get an error: + +:: + + basecli tx send --name=friend --amount=500000mycoin --to=$ME --sequence=2 + +Let's send another transaction: + +:: + + basecli tx send --name=cool --amount=2345mycoin --to=$YOU --sequence=2 + +Note the ``hash`` value in the response - this is the hash of the +transaction. We can query for the transaction by this hash: + +:: + + basecli query tx + +See ``basecli tx send --help`` for additional details. + +Proof +----- + +Even if you don't see it in the UI, the result of every query comes with +a proof. This is a Merkle proof that the result of the query is actually +contained in the state. And the state's Merkle root is contained in a +recent block header. Behind the scenes, ``countercli`` will not only +verify that this state matches the header, but also that the header is +properly signed by the known validator set. It will even update the +validator set as needed, so long as there have not been major changes +and it is secure to do so. So, if you wonder why the query may take a +second... there is a lot of work going on in the background to make sure +even a lying full node can't trick your client. + +Accounts and Transactions +------------------------- + +For a better understanding of how to further use the tools, it helps to +understand the underlying data structures. + +Accounts +~~~~~~~~ + +The Basecoin state consists entirely of a set of accounts. 
Each account +contains a public key, a balance in many different coin denominations, +and a strictly increasing sequence number for replay protection. This +type of account was directly inspired by accounts in Ethereum, and is +unlike Bitcoin's use of Unspent Transaction Outputs (UTXOs). Note +Basecoin is a multi-asset cryptocurrency, so each account can have many +different kinds of tokens. + +:: + + type Account struct { + PubKey crypto.PubKey `json:"pub_key"` // May be nil, if not known. + Sequence int `json:"sequence"` + Balance Coins `json:"coins"` + } + + type Coins []Coin + + type Coin struct { + Denom string `json:"denom"` + Amount int64 `json:"amount"` + } + +If you want to add more coins to a blockchain, you can do so manually in +the ``~/.basecoin/genesis.json`` before you start the blockchain for the +first time. + +Accounts are serialized and stored in a Merkle tree under the key +``base/a/
<address>``, where ``<address>
`` is the address of the account. +Typically, the address of the account is the 20-byte ``RIPEMD160`` hash +of the public key, but other formats are acceptable as well, as defined +in the `Tendermint crypto +library `__. The Merkle tree +used in Basecoin is a balanced, binary search tree, which we call an +`IAVL tree `__. + +Transactions +~~~~~~~~~~~~ + +Basecoin defines a transaction type, the ``SendTx``, which allows tokens +to be sent to other accounts. The ``SendTx`` takes a list of inputs and +a list of outputs, and transfers all the tokens listed in the inputs +from their corresponding accounts to the accounts listed in the output. +The ``SendTx`` is structured as follows: + +:: + + type SendTx struct { + Gas int64 `json:"gas"` + Fee Coin `json:"fee"` + Inputs []TxInput `json:"inputs"` + Outputs []TxOutput `json:"outputs"` + } + + type TxInput struct { + Address []byte `json:"address"` // Hash of the PubKey + Coins Coins `json:"coins"` // + Sequence int `json:"sequence"` // Must be 1 greater than the last committed TxInput + Signature crypto.Signature `json:"signature"` // Depends on the PubKey type and the whole Tx + PubKey crypto.PubKey `json:"pub_key"` // Is present iff Sequence == 0 + } + + type TxOutput struct { + Address []byte `json:"address"` // Hash of the PubKey + Coins Coins `json:"coins"` // + } + +Note the ``SendTx`` includes a field for ``Gas`` and ``Fee``. The +``Gas`` limits the total amount of computation that can be done by the +transaction, while the ``Fee`` refers to the total amount paid in fees. +This is slightly different from Ethereum's concept of ``Gas`` and +``GasPrice``, where ``Fee = Gas x GasPrice``. In Basecoin, the ``Gas`` +and ``Fee`` are independent, and the ``GasPrice`` is implicit. + +In Basecoin, the ``Fee`` is meant to be used by the validators to inform +the ordering of transactions, like in Bitcoin. And the ``Gas`` is meant +to be used by the application plugin to control its execution. 
There is +currently no means to pass ``Fee`` information to the Tendermint +validators, but it will come soon... + +Note also that the ``PubKey`` only needs to be sent for +``Sequence == 0``. After that, it is stored under the account in the +Merkle tree and subsequent transactions can exclude it, using only the +``Address`` to refer to the sender. Ethereum does not require public +keys to be sent in transactions as it uses a different elliptic curve +scheme which enables the public key to be derived from the signature +itself. + +Finally, note that the use of multiple inputs and multiple outputs +allows us to send many different types of tokens between many different +accounts at once in an atomic transaction. Thus, the ``SendTx`` can +serve as a basic unit of decentralized exchange. When using multiple +inputs and outputs, you must make sure that the sum of coins of the +inputs equals the sum of coins of the outputs (no creating money), and +that all accounts that provide inputs have signed the transaction. + +Clean Up +-------- + +**WARNING:** Running these commands will wipe out any existing +information in both the ``~/.basecli`` and ``~/.basecoin`` directories, +including private keys. + +To remove all the files created and refresh your environment (e.g., if +starting this tutorial again or trying something new), the following +commands are run: + +:: + + basecli reset_all + rm -rf ~/.basecoin + +In this guide, we introduced the ``basecoin`` and ``basecli`` tools, +demonstrated how to start a new basecoin blockchain and how to send +tokens between accounts, and discussed the underlying data types for +accounts and transactions, specifically the ``Account`` and the +``SendTx``. 
diff --git a/docs/basecoin/extensions.rst b/docs/basecoin/extensions.rst new file mode 100644 index 0000000000..c1db864a3c --- /dev/null +++ b/docs/basecoin/extensions.rst @@ -0,0 +1,215 @@ +Basecoin Extensions +=================== + +TODO: re-write for extensions + +In the `previous guide `__, we saw how to use the +``basecoin`` tool to start a blockchain and the ``basecli`` tools to +send transactions. We also learned about ``Account`` and ``SendTx``, the +basic data types giving us a multi-asset cryptocurrency. Here, we will +demonstrate how to extend the tools to use another transaction type, the +``AppTx``, so we can send data to a custom plugin. In this example we +explore a simple plugin named ``counter``. + +Example Plugin +-------------- + +The design of the ``basecoin`` tool makes it easy to extend for custom +functionality. The Counter plugin is bundled with basecoin, so if you +have already `installed basecoin `__ and run +``make install`` then you should be able to run a full node with +``counter`` and the a light-client ``countercli`` from terminal. The +Counter plugin is just like the ``basecoin`` tool. They both use the +same library of commands, including one for signing and broadcasting +``SendTx``. + +Counter transactions take two custom inputs, a boolean argument named +``valid``, and a coin amount named ``countfee``. The transaction is only +accepted if both ``valid`` is set to true and the transaction input +coins is greater than ``countfee`` that the user provides. + +A new blockchain can be initialized and started just like in the +`previous guide `__: + +:: + + # WARNING: this wipes out data - but counter is only for demos... + rm -rf ~/.counter + countercli reset_all + + countercli keys new cool + countercli keys new friend + + counter init $(countercli keys get cool | awk '{print $2}') + + counter start + +The default files are stored in ``~/.counter``. 
In another window we can +initialize the light-client and send a transaction: + +:: + + countercli init --node=tcp://localhost:46657 --genesis=$HOME/.counter/genesis.json + + YOU=$(countercli keys get friend | awk '{print $2}') + countercli tx send --name=cool --amount=1000mycoin --to=$YOU --sequence=1 + +But the Counter has an additional command, ``countercli tx counter``, +which crafts an ``AppTx`` specifically for this plugin: + +:: + + countercli tx counter --name cool + countercli tx counter --name cool --valid + +The first transaction is rejected by the plugin because it was not +marked as valid, while the second transaction passes. We can build +plugins that take many arguments of different types, and easily extend +the tool to accomodate them. Of course, we can also expose queries on +our plugin: + +:: + + countercli query counter + +Tada! We can now see that our custom counter plugin transactions went +through. You should see a Counter value of 1 representing the number of +valid transactions. If we send another transaction, and then query +again, we will see the value increment. Note that we need the sequence +number here to send the coins (it didn't increment when we just pinged +the counter) + +:: + + countercli tx counter --name cool --countfee=2mycoin --sequence=2 --valid + countercli query counter + +The Counter value should be 2, because we sent a second valid +transaction. And this time, since we sent a countfee (which must be less +than or equal to the total amount sent with the tx), it stores the +``TotalFees`` on the counter as well. + +Keep it mind that, just like with ``basecli``, the ``countercli`` +verifies a proof that the query response is correct and up-to-date. + +Now, before we implement our own plugin and tooling, it helps to +understand the ``AppTx`` and the design of the plugin system. 
+ +AppTx +----- + +The ``AppTx`` is similar to the ``SendTx``, but instead of sending coins +from inputs to outputs, it sends coins from one input to a plugin, and +can also send some data. + +:: + + type AppTx struct { + Gas int64 `json:"gas"` + Fee Coin `json:"fee"` + Input TxInput `json:"input"` + Name string `json:"type"` // Name of the plugin + Data []byte `json:"data"` // Data for the plugin to process + } + +The ``AppTx`` enables Basecoin to be extended with arbitrary additional +functionality through the use of plugins. The ``Name`` field in the +``AppTx`` refers to the particular plugin which should process the +transaction, and the ``Data`` field of the ``AppTx`` is the data to be +forwarded to the plugin for processing. + +Note the ``AppTx`` also has a ``Gas`` and ``Fee``, with the same meaning +as for the ``SendTx``. It also includes a single ``TxInput``, which +specifies the sender of the transaction, and some coins that can be +forwarded to the plugin as well. + +Plugins +------- + +A plugin is simply a Go package that implements the ``Plugin`` +interface: + +:: + + type Plugin interface { + + // Name of this plugin, should be short. + Name() string + + // Run a transaction from ABCI DeliverTx + RunTx(store KVStore, ctx CallContext, txBytes []byte) (res abci.Result) + + // Other ABCI message handlers + SetOption(store KVStore, key string, value string) (log string) + InitChain(store KVStore, vals []*abci.Validator) + BeginBlock(store KVStore, hash []byte, header *abci.Header) + EndBlock(store KVStore, height uint64) (res abci.ResponseEndBlock) + } + + type CallContext struct { + CallerAddress []byte // Caller's Address (hash of PubKey) + CallerAccount *Account // Caller's Account, w/ fee & TxInputs deducted + Coins Coins // The coins that the caller wishes to spend, excluding fees + } + +The workhorse of the plugin is ``RunTx``, which is called when an +``AppTx`` is processed. 
The ``Data`` from the ``AppTx`` is passed in as +the ``txBytes``, while the ``Input`` from the ``AppTx`` is used to +populate the ``CallContext``. + +Note that ``RunTx`` also takes a ``KVStore`` - this is an abstraction +for the underlying Merkle tree which stores the account data. By passing +this to the plugin, we enable plugins to update accounts in the Basecoin +state directly, and also to store arbitrary other information in the +state. In this way, the functionality and state of a Basecoin-derived +cryptocurrency can be greatly extended. One could imagine going so far +as to implement the Ethereum Virtual Machine as a plugin! + +For details on how to initialize the state using ``SetOption``, see the +`guide to using the basecoin tool `__. + +Implement your own +------------------ + +To implement your own plugin and tooling, make a copy of +``docs/guide/counter``, and modify the code accordingly. Here, we will +briefly describe the design and the changes to be made, but see the code +for more details. + +First is the ``cmd/counter/main.go``, which drives the program. It can +be left alone, but you should change any occurrences of ``counter`` to +whatever your plugin tool is going to be called. You must also register +your plugin(s) with the basecoin app with ``RegisterStartPlugin``. + +The light-client is located in ``cmd/countercli/main.go`` and allows for +transaction and query commands. This file can also be left mostly alone +besides replacing the application name and adding references to new +plugin commands. + +Next is the custom commands in ``cmd/countercli/commands/``. These files +are where we extend the tool with any new commands and flags we need to +send transactions or queries to our plugin. You define custom ``tx`` and +``query`` subcommands, which are registered in ``main.go`` (avoiding +``init()`` auto-registration, for less magic and more control in the +main executable). 
+ +Finally is ``plugins/counter/counter.go``, where we provide an +implementation of the ``Plugin`` interface. The most important part of +the implementation is the ``RunTx`` method, which determines the meaning +of the data sent along in the ``AppTx``. In our example, we define a new +transaction type, the ``CounterTx``, which we expect to be encoded in +the ``AppTx.Data``, and thus to be decoded in the ``RunTx`` method, and +used to update the plugin state. + +For more examples and inspiration, see our `repository of example +plugins `__. + +Conclusion +---------- + +In this guide, we demonstrated how to create a new plugin and how to +extend the ``basecoin`` tool to start a blockchain with the plugin +enabled and send transactions to it. In the next guide, we introduce a +`plugin for Inter Blockchain Communication `__, which allows us +to publish proofs of the state of one blockchain to another, and thus to +transfer tokens and data between them. diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000..3af51ef959 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +# +# Cosmos-SDK documentation build configuration file, created by +# sphinx-quickstart on Fri Sep 1 21:37:02 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +import sphinx_rtd_theme + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Cosmos-SDK' +copyright = u'2017, The Authors' +author = u'The Authors' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'' +# The full version, including alpha/beta/rc tags. +release = u'' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' +# html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'about.html', + 'navigation.html', + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + 'donate.html', + ] +} + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Cosmos-SDKdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'Cosmos-SDK.tex', u'Cosmos-SDK Documentation', + u'The Authors', 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'cosmos-sdk', u'Cosmos-SDK Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'Cosmos-SDK', u'Cosmos-SDK Documentation', + author, 'Cosmos-SDK', 'One line description of project.', + 'Miscellaneous'), +] diff --git a/docs/design.md b/docs/design.md deleted file mode 100644 index 1bb9964d76..0000000000 --- a/docs/design.md +++ /dev/null @@ -1,96 +0,0 @@ -## Design Document - -### Object-Capability Model - -When thinking about security, it's good to start with a specific threat model. Our threat model is the following: - -> We want to assume a thriving ecosystem of Cosmos-SDK modules that are easy to compose into a blockchain application. Some of these modules will be faulty or malicious. - -The Cosmos-SDK is designed to address this threat by being the foundation of an object capability system. - -``` -The structural properties of object capability systems favor -modularity in code design and ensure reliable encapsulation in -code implementation. - -These structural properties facilitate the analysis of some -security properties of an object-capability program or operating -system. Some of these — in particular, information flow properties -— can be analyzed at the level of object references and -connectivity, independent of any knowledge or analysis of the code -that determines the behavior of the objects. 
As a consequence, -these security properties can be established and maintained in the -presence of new objects that contain unknown and possibly -malicious code. - -These structural properties stem from the two rules governing -access to existing objects: - -1) An object A can send a message to B only if object A holds a -reference to B. - -2) An object A can obtain a reference to C only -if object A receives a message containing a reference to C. As a -consequence of these two rules, an object can obtain a reference -to another object only through a preexisting chain of references. -In short, "Only connectivity begets connectivity." - -- https://en.wikipedia.org/wiki/Object-capability_model -``` - -Strictly speaking, Golang does not implement object capabilities completely, because of several issues: - -* pervasive ability to import primitive modules (e.g. "unsafe", "os") -* pervasive ability to override module vars https://github.com/golang/go/issues/23161 -* data-race vulnerability where 2+ goroutines can create illegal interface values - -The first is easy to catch by auditing imports and using a proper dependency version control system like Glide. The second and third are unfortunate but it can be audited with some cost. - -Perhaps [Go2 will implement the object capability model](https://github.com/golang/go/issues/23157). - -### What does it look like? - -Only reveal what is necessary to get the work done. - -For example, the following code snippet violates the object capabilities principle: - -```golang -type AppAccount struct {...} -var account := &AppAccount{ - Address: pub.Address(), - Coins: sdk.Coins{{"ATM", 100}}, -} -var sumValue := externalModule.ComputeSumValue(account) -``` - -The method "ComputeSumValue" implies a pure function, yet the implied capability of accepting a pointer value is the capability to modify that value. The preferred method signature should take a copy instead. 
- -```golang -var sumValue := externalModule.ComputeSumValue(*account) -``` - -In the Cosmos SDK, you can see the application of this principle in the basecoin examples folder. - -```golang -// File: cosmos-sdk/examples/basecoin/app/init_handlers.go -package app - -import ( - "github.com/cosmos/cosmos-sdk/x/bank" - "github.com/cosmos/cosmos-sdk/x/sketchy" -) - -func (app *BasecoinApp) initRouterHandlers() { - - // All handlers must be added here. - // The order matters. - app.router.AddRoute("bank", bank.NewHandler(app.accountMapper)) - app.router.AddRoute("sketchy", sketchy.NewHandler()) -} -``` - -In the Basecoin example, the sketchy handler isn't provided an account mapper, which does provide the bank handler with the capability (in conjunction with the context of a transaction run). - -### More Resources - -* Read the [Cosmos SDK Guide](./guide.md). diff --git a/docs/graphics/cosmos-sdk-image.png b/docs/graphics/cosmos-sdk-image.png new file mode 100644 index 0000000000..ab9a4abc68 Binary files /dev/null and b/docs/graphics/cosmos-sdk-image.png differ diff --git a/docs/guide.md b/docs/guide.md index 6f4cfc1daf..ae2829b451 100644 --- a/docs/guide.md +++ b/docs/guide.md @@ -4,7 +4,7 @@ If you want to see some examples, take a look at the [examples/basecoin](/exampl ## Design Goals -The design of the Cosmos SDK is based on the principles of "cababilities systems". +The design of the Cosmos SDK is based on the principles of "capabilities systems". ## Capabilities systems diff --git a/docs/ibc.rst b/docs/ibc.rst new file mode 100644 index 0000000000..30b9a16faf --- /dev/null +++ b/docs/ibc.rst @@ -0,0 +1,424 @@ +IBC +=== + +TODO: update in light of latest SDK (this document is currently out of date) + +One of the most exciting elements of the Cosmos Network is the +InterBlockchain Communication (IBC) protocol, which enables +interoperability across different blockchains. 
We implemented IBC as a +basecoin plugin, and we'll show you how to use it to send tokens across +blockchains! + +Please note: this tutorial assumes familiarity with the Cosmos SDK. + +The IBC plugin defines a new set of transactions as subtypes of the +``AppTx``. The plugin's functionality is accessed by setting the +``AppTx.Name`` field to ``"IBC"``, and setting the ``Data`` field to the +serialized IBC transaction type. + +We'll demonstrate exactly how this works below. + +Inter BlockChain Communication +------------------------------ + +Let's review the IBC protocol. The purpose of IBC is to enable one +blockchain to function as a light-client of another. Since we are using +a classical Byzantine Fault Tolerant consensus algorithm, light-client +verification is cheap and easy: all we have to do is check validator +signatures on the latest block, and verify a Merkle proof of the state. + +In Tendermint, validators agree on a block before processing it. This +means that the signatures and state root for that block aren't included +until the next block. Thus, each block contains a field called +``LastCommit``, which contains the votes responsible for committing the +previous block, and a field in the block header called ``AppHash``, +which refers to the Merkle root hash of the application after processing +the transactions from the previous block. So, if we want to verify the +``AppHash`` from height H, we need the signatures from ``LastCommit`` at +height H+1. (And remember that this ``AppHash`` only contains the +results from all transactions up to and including block H-1) + +Unlike Proof-of-Work, the light-client protocol does not need to +download and check all the headers in the blockchain - the client can +always jump straight to the latest header available, so long as the +validator set has not changed much. 
If the validator set is changing, +the client needs to track these changes, which requires downloading +headers for each block in which there is a significant change. Here, we +will assume the validator set is constant, and postpone handling +validator set changes for another time. + +Now we can describe exactly how IBC works. Suppose we have two +blockchains, ``chain1`` and ``chain2``, and we want to send some data +from ``chain1`` to ``chain2``. We need to do the following: 1. Register +the details (ie. chain ID and genesis configuration) of ``chain1`` on +``chain2`` 2. Within ``chain1``, broadcast a transaction that creates an +outgoing IBC packet destined for ``chain2`` 3. Broadcast a transaction +to ``chain2`` informing it of the latest state (ie. header and commit +signatures) of ``chain1`` 4. Post the outgoing packet from ``chain1`` to +``chain2``, including the proof that it was indeed committed on +``chain1``. Note ``chain2`` can only verify this proof because it has a +recent header and commit. + +Each of these steps involves a separate IBC transaction type. Let's take +them up in turn. + +IBCRegisterChainTx +~~~~~~~~~~~~~~~~~~ + +The ``IBCRegisterChainTx`` is used to register one chain on another. It +contains the chain ID and genesis configuration of the chain to +register: + +:: + + type IBCRegisterChainTx struct { BlockchainGenesis } + + type BlockchainGenesis struct { ChainID string Genesis string } + +This transaction should only be sent once for a given chain ID, and +successive sends will return an error. + +IBCUpdateChainTx +~~~~~~~~~~~~~~~~ + +The ``IBCUpdateChainTx`` is used to update the state of one chain on +another. It contains the header and commit signatures for some block in +the chain: + +:: + + type IBCUpdateChainTx struct { + Header tm.Header + Commit tm.Commit + } + +In the future, it needs to be updated to include changes to the +validator set as well. 
Anyone can relay an ``IBCUpdateChainTx``, and +they only need to do so as frequently as packets are being sent or the +validator set is changing. + +IBCPacketCreateTx +~~~~~~~~~~~~~~~~~ + +The ``IBCPacketCreateTx`` is used to create an outgoing packet on one +chain. The packet itself contains the source and destination chain IDs, +a sequence number (i.e. an integer that increments with every message +sent between this pair of chains), a packet type (e.g. coin, data, +etc.), and a payload. + +:: + + type IBCPacketCreateTx struct { + Packet + } + + type Packet struct { + SrcChainID string + DstChainID string + Sequence uint64 + Type string + Payload []byte + } + +We have yet to define the format for the payload, so, for now, it's just +arbitrary bytes. + +One way to think about this is that ``chain2`` has an account on +``chain1``. With a ``IBCPacketCreateTx`` on ``chain1``, we send funds to +that account. Then we can prove to ``chain2`` that there are funds +locked up for it in it's account on ``chain1``. Those funds can only be +unlocked with corresponding IBC messages back from ``chain2`` to +``chain1`` sending the locked funds to another account on ``chain1``. + +IBCPacketPostTx +~~~~~~~~~~~~~~~ + +The ``IBCPacketPostTx`` is used to post an outgoing packet from one +chain to another. It contains the packet and a proof that the packet was +committed into the state of the sending chain: + +:: + + type IBCPacketPostTx struct { + FromChainID string // The immediate source of the packet, not always Packet.SrcChainID + FromChainHeight uint64 // The block height in which Packet was committed, to check Proof Packet + Proof *merkle.IAVLProof + } + +The proof is a Merkle proof in an IAVL tree, our implementation of a +balanced, Merklized binary search tree. It contains a list of nodes in +the tree, which can be hashed together to get the Merkle root hash. 
This
+hash must match the ``AppHash`` contained in the header at
+``FromChainHeight + 1``
+
+- note the ``+ 1`` is necessary since ``FromChainHeight`` is the height
+  in which the packet was committed, and the resulting state root is
+  not included until the next block.
+
+IBC State
+~~~~~~~~~
+
+Now that we've seen all the transaction types, let's talk about the
+state. Each chain stores some IBC state in its Merkle tree. For each
+chain being tracked by our chain, we store:
+
+- Genesis configuration
+- Latest state
+- Headers for recent heights
+
+We also store all incoming (ingress) and outgoing (egress) packets.
+
+The state of a chain is updated every time an ``IBCUpdateChainTx`` is
+committed. New packets are added to the egress state upon
+``IBCPacketCreateTx``. New packets are added to the ingress state upon
+``IBCPacketPostTx``, assuming the proof checks out.
+
+Merkle Queries
+--------------
+
+The Basecoin application uses a single Merkle tree that is shared across
+all its state, including the built-in accounts state and all plugin
+state. For this reason, it's important to use explicit key names and/or
+hashes to ensure there are no collisions.
+
+We can query the Merkle tree using the ABCI Query method. If we pass in
+the correct key, it will return the corresponding value, as well as a
+proof that the key and value are contained in the Merkle tree.
+
+The results of a query can thus be used as proof in an
+``IBCPacketPostTx``.
+
+Relay
+-----
+
+While we need all these packet types internally to keep track of all the
+proofs on both chains in a secure manner, for the normal work-flow, we
+can run a relay node that handles the cross-chain interaction.
+
+In this case, there are only two steps. First ``basecoin relay init``,
+which must be run once to register each chain with the other one, and
+make sure they are ready to send and receive. 
And then
+``basecoin relay start``, which is a long-running process polling the
+queue on each side, and relaying all new messages to the other chain.
+
+This requires that the relay has access to accounts with some funds on
+both chains to pay for all the ibc packets it will be forwarding.
+
+Try it out
+----------
+
+Now that we have all the background knowledge, let's actually walk
+through the tutorial.
+
+Make sure you have installed `basecoin and
+basecli `__.
+
+Basecoin is a framework for creating new cryptocurrency applications. It
+comes with an ``IBC`` plugin enabled by default.
+
+You will also want to install the
+`jq `__ for handling JSON at the command
+line.
+
+If you have any trouble with this, you can also look at the `test
+scripts `__ or just run ``make test_cli`` in basecoin
+repo. Otherwise, open up 5 (yes 5!) terminal tabs....
+
+Preliminaries
+~~~~~~~~~~~~~
+
+::
+
+    # first, clean up any old garbage for a fresh slate...
+    rm -rf ~/.ibcdemo/
+
+Let's start by setting up some environment variables and aliases:
+
+::
+
+    export BCHOME1_CLIENT=~/.ibcdemo/chain1/client
+    export BCHOME1_SERVER=~/.ibcdemo/chain1/server
+    export BCHOME2_CLIENT=~/.ibcdemo/chain2/client
+    export BCHOME2_SERVER=~/.ibcdemo/chain2/server
+    alias basecli1="basecli --home $BCHOME1_CLIENT"
+    alias basecli2="basecli --home $BCHOME2_CLIENT"
+    alias basecoin1="basecoin --home $BCHOME1_SERVER"
+    alias basecoin2="basecoin --home $BCHOME2_SERVER"
+
+This will give us some new commands to use instead of raw ``basecli``
+and ``basecoin`` to ensure we're using the right configuration for the
+chain we want to talk to. 
+ +We also want to set some chain IDs: + +:: + + export CHAINID1="test-chain-1" + export CHAINID2="test-chain-2" + +And since we will run two different chains on one machine, we need to +maintain different sets of ports: + +:: + + export PORT_PREFIX1=1234 + export PORT_PREFIX2=2345 + export RPC_PORT1=${PORT_PREFIX1}7 + export RPC_PORT2=${PORT_PREFIX2}7 + +Setup Chain 1 +~~~~~~~~~~~~~ + +Now, let's create some keys that we can use for accounts on +test-chain-1: + +:: + + basecli1 keys new money + basecli1 keys new gotnone + export MONEY=$(basecli1 keys get money | awk '{print $2}') + export GOTNONE=$(basecli1 keys get gotnone | awk '{print $2}') + +and create an initial configuration giving lots of coins to the $MONEY +key: + +:: + + basecoin1 init --chain-id $CHAINID1 $MONEY + +Now start basecoin: + +:: + + sed -ie "s/4665/$PORT_PREFIX1/" $BCHOME1_SERVER/config.toml + + basecoin1 start &> basecoin1.log & + +Note the ``sed`` command to replace the ports in the config file. You +can follow the logs with ``tail -f basecoin1.log`` + +Now we can attach the client to the chain and verify the state. The +first account should have money, the second none: + +:: + + basecli1 init --node=tcp://localhost:${RPC_PORT1} --genesis=${BCHOME1_SERVER}/genesis.json + basecli1 query account $MONEY + basecli1 query account $GOTNONE + +Setup Chain 2 +~~~~~~~~~~~~~ + +This is the same as above, except with ``basecli2``, ``basecoin2``, and +``$CHAINID2``. We will also need to change the ports, since we're +running another chain on the same local machine. 
+
+Let's create new keys for test-chain-2:
+
+::
+
+    basecli2 keys new moremoney
+    basecli2 keys new broke
+    MOREMONEY=$(basecli2 keys get moremoney | awk '{print $2}')
+    BROKE=$(basecli2 keys get broke | awk '{print $2}')
+
+And prepare the genesis block, and start the server:
+
+::
+
+    basecoin2 init --chain-id $CHAINID2 $(basecli2 keys get moremoney | awk '{print $2}')
+
+    sed -ie "s/4665/$PORT_PREFIX2/" $BCHOME2_SERVER/config.toml
+
+    basecoin2 start &> basecoin2.log &
+
+Now attach the client to the chain and verify the state. The first
+account should have money, the second none:
+
+::
+
+    basecli2 init --node=tcp://localhost:${RPC_PORT2} --genesis=${BCHOME2_SERVER}/genesis.json
+    basecli2 query account $MOREMONEY
+    basecli2 query account $BROKE
+
+Connect these chains
+~~~~~~~~~~~~~~~~~~~~
+
+OK! So we have two chains running on your local machine, with different
+keys on each. Let's hook them up together by starting a relay process to
+forward messages from one chain to the other.
+
+The relay account needs some money in it to pay for the ibc messages, so
+for now, we have to transfer some cash from the rich accounts before we
+start the actual relay.
+
+::
+
+    # note that this key.json file is a hardcoded demo for all chains, this will
+    # be updated in a future release
+    RELAY_KEY=$BCHOME1_SERVER/key.json
+    RELAY_ADDR=$(cat $RELAY_KEY | jq .address | tr -d \")
+
+    basecli1 tx send --amount=100000mycoin --sequence=1 --to=$RELAY_ADDR --name=money
+    basecli1 query account $RELAY_ADDR
+
+    basecli2 tx send --amount=100000mycoin --sequence=1 --to=$RELAY_ADDR --name=moremoney
+    basecli2 query account $RELAY_ADDR
+
+Now we can start the relay process. 
+
+::
+
+    basecoin relay init --chain1-id=$CHAINID1 --chain2-id=$CHAINID2 \
+    --chain1-addr=tcp://localhost:${RPC_PORT1} --chain2-addr=tcp://localhost:${RPC_PORT2} \
+    --genesis1=${BCHOME1_SERVER}/genesis.json --genesis2=${BCHOME2_SERVER}/genesis.json \
+    --from=$RELAY_KEY
+
+    basecoin relay start --chain1-id=$CHAINID1 --chain2-id=$CHAINID2 \
+    --chain1-addr=tcp://localhost:${RPC_PORT1} --chain2-addr=tcp://localhost:${RPC_PORT2} \
+    --from=$RELAY_KEY &> relay.log &
+
+This should start up the relay, and assuming no error messages came out,
+the two chains are now fully connected over IBC. Let's use this to send
+our first tx across the chains...
+
+Sending cross-chain payments
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The hard part is over, we set up two blockchains, a few private keys,
+and a secure relay between them. Now we can enjoy the fruits of our
+labor...
+
+::
+
+    # Here's an empty account on test-chain-2
+    basecli2 query account $BROKE
+
+::
+
+    # Let's send some funds from test-chain-1
+    basecli1 tx send --amount=12345mycoin --sequence=2 --to=test-chain-2/$BROKE --name=money
+
+::
+
+    # give it time to arrive...
+    sleep 2
+    # now you should see 12345 coins!
+    basecli2 query account $BROKE
+
+You're no longer broke! Cool, huh? Now have fun exploring and sending
+coins across the chains. And making more accounts as you want to.
+
+Conclusion
+----------
+
+In this tutorial we explained how IBC works, and demonstrated how to use
+it to communicate between two chains. We did the simplest communication
+possible: a one way transfer of data from chain1 to chain2. The most
+important part was that we updated chain2 with the latest state (i.e.
+header and commit) of chain1, and then were able to post a proof to
+chain2 that a packet was committed to the outgoing state of chain1. 
+ +In a future tutorial, we will demonstrate how to use IBC to actually +transfer tokens between two blockchains, but we'll do it with real +testnets deployed across multiple nodes on the network. Stay tuned! diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000000..80d5dd48d5 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,66 @@ +.. Cosmos-SDK documentation master file, created by + sphinx-quickstart on Fri Sep 1 21:37:02 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to the Cosmos SDK! +========================== + +.. image:: graphics/cosmos-sdk-image.png + :height: 250px + :width: 500px + :align: center + +SDK +--- + +.. One maxdepth for now + +.. toctree:: + :maxdepth: 1 + + sdk/overview.rst + sdk/install.rst + sdk/glossary.rst + +.. Basecoin +.. -------- + +.. .. toctree:: + :maxdepth: 2 + +.. basecoin/basics.rst +.. basecoin/extensions.rst + +Extensions +---------- + +Replay Protection +~~~~~~~~~~~~~~~~~ + +.. toctree:: + :maxdepth: 1 + + x/replay-protection.rst + + +Staking +~~~~~~~ + +.. toctree:: + :maxdepth: 1 + + staking/intro.rst + staking/key-management.rst + staking/local-testnet.rst + staking/public-testnet.rst + +Extras +------ + +.. One maxdepth for now + +.. toctree:: + :maxdepth: 1 + + ibc.rst diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000..916e57ee79 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=python -msphinx +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=Cosmos-SDK + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The Sphinx module was not found. Make sure you have Sphinx installed, + echo.then set the SPHINXBUILD environment variable to point to the full + echo.path of the 'sphinx-build' executable. 
Alternatively you may add the
+	echo.Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
diff --git a/docs/sdk/glossary.rst b/docs/sdk/glossary.rst
new file mode 100644
index 0000000000..1480e7b38c
--- /dev/null
+++ b/docs/sdk/glossary.rst
@@ -0,0 +1,231 @@
+Glossary
+========
+
+This glossary defines many terms used throughout documentation of Quark.
+If there is ever a concept that seems unclear, check here. This is
+mainly to provide a background and general understanding of the
+different words and concepts that are used. Other documents will explain
+in more detail how to combine these concepts to build a particular
+application.
+
+Transaction
+-----------
+
+A transaction is a packet of binary data that contains all information
+to validate and perform an action on the blockchain. The only other data
+that it interacts with is the current state of the chain (key-value
+store), and it must have a deterministic action. The transaction is the
+main piece of one request.
+
+We currently make heavy use of
+`go-wire `__ and
+`data `__ to
+provide binary and json encodings and decodings for ``struct`` or
+interface objects. Here, encoding and decoding operations are designed
+to operate with interfaces nested any amount of times (like an onion!).
+There is one public ``TxMapper``
+in the basecoin root package, and all modules can register their own
+transaction types there. This allows us to deserialize the entire
+transaction in one location (even with types defined in other repos), to
+easily embed an arbitrary transaction inside another without specifying
+the type, and provide an automatic json representation allowing for
+users (or apps) to inspect the chain. 
+
+Note how we can wrap any other transaction, add a fee level, and not
+worry about the encoding in our module any more?
+
+::
+
+    type Fee struct {
+      Fee   coin.Coin      `json:"fee"`
+      Payer basecoin.Actor `json:"payer"` // the address who pays the fee
+      Tx    basecoin.Tx    `json:"tx"`
+    }
+
+Context (ctx)
+-------------
+
+As a request passes through the system, it may pick up information such
+as the block height the request runs at. In order to carry this information
+between modules it is saved to the context. Further, all information
+must be deterministic from the context in which the request runs (based
+on the transaction and the block it was included in) and can be used to
+validate the transaction.
+
+Data Store
+----------
+
+In order to provide proofs to Tendermint, we keep all data in one
+key-value (kv) store which is indexed with a merkle tree. This allows
+for the easy generation of a root hash and proofs for queries without
+requiring complex logic inside each module. Standardization of this
+process also allows powerful light-client tooling as any store data may
+be verified on the fly.
+
+The largest limitation of the current implementation of the kv-store is
+that the interface that the application must use can only ``Get`` and
+``Set`` single data points. That said, there are some data structures
+like queues and range queries that are available in ``state`` package.
+These provide higher-level functionality in a standard format, but have
+not yet been integrated into the kv-store interface.
+
+Isolation
+---------
+
+One of the main arguments for blockchain is security. So while we
+encourage the use of third-party modules, all developers must be
+vigilant against security holes. If you use the
+`stack `__
+package, it will provide two different types of compartmentalization
+security.
+
+The first is to limit the working kv-store space of each module. 
When
+``DeliverTx`` is called for a module, it is never given the entire data
+store, but rather only its own prefixed subset of the store. This is
+achieved by prefixing all keys transparently with
+`` + 0x0``, using the null byte as a separator. Since the
+module name must be a string, no malicious naming scheme can ever lead
+to a collision. Inside a module, we can write using any key value we
+desire without the possibility that we have modified data belonging to a
+separate module.
+
+The second is to add permissions to the transaction context. The
+transaction context can specify that the tx has been signed by one or
+multiple specific actors.
+
+A transaction will only be executed if the permission requirements have
+been fulfilled. For example the sender of funds must have signed, or 2
+out of 3 multi-signature actors must have signed a joint account. To
+prevent the forgery of account signatures from unintended modules each
+permission is associated with the module that granted it (in this case
+`auth `__),
+and if a module tries to add a permission for another module, it will
+panic. There is also protection if a module creates a brand new fake
+context to trick the downstream modules. Each context enforces the rules
+on how to make child contexts, and the stack builder enforces
+that the context passed from one level to the next is a valid child of
+the original one.
+
+These security measures ensure that modules can confidently write to
+their local section of the database and trust the permissions associated
+with the context, without concern of interference from other modules.
+(Okay, if you see a bunch of C-code in the module traversing through all
+the memory space of the application, then get worried....)
+
+Handler
+-------
+
+The ABCI interface is handled by ``app``, which translates these data
+structures into an internal format that is more convenient, but unable
+to travel over the wire. 
The basic interface for any code that modifies +state is the ``Handler`` interface, which provides four methods: + +:: + + Name() string + CheckTx(ctx Context, store state.KVStore, tx Tx) (Result, error) + DeliverTx(ctx Context, store state.KVStore, tx Tx) (Result, error) + SetOption(l log.Logger, store state.KVStore, module, key, value string) (string, error) + +Note the ``Context``, ``KVStore``, and ``Tx`` as principal carriers of +information. And that Result is always success, and we have a second +error return for errors (which is much more standard golang that +``res.IsErr()``) + +The ``Handler`` interface is designed to be the basis for all modules +that execute transactions, and this can provide a large degree of code +interoperability, much like ``http.Handler`` does in golang web +development. + +Modules +------- + +TODO: update (s/Modules/handlers+mappers+stores/g) & add Msg + Tx (a signed message) + +A module is a set of functionality which should be typically designed as +self-sufficient. Common elements of a module are: + +- transaction types (either end transactions, or transaction wrappers) +- custom error codes +- data models (to persist in the kv-store) +- handler (to handle any end transactions) + +Dispatcher +---------- + +We usually will want to have multiple modules working together, and need +to make sure the correct transactions get to the correct module. So we +have ``coin`` sending money, ``roles`` to create multi-sig accounts, and +``ibc`` for following other chains all working together without +interference. + +We can then register a ``Dispatcher``, which +also implements the ``Handler`` interface. We then register a list of +modules with the dispatcher. Every module has a unique ``Name()``, which +is used for isolating its state space. We use this same name for routing +transactions. 
Each transaction implementation must be registered with
+go-wire via ``TxMapper``, so we just look at the registered name of this
+transaction, which should be of the form ``/xxx``. The
+dispatcher grabs the appropriate module name from the tx name and routes
+it if the module is present.
+
+This all seems like a bit of magic, but really we're just making use of
+go-wire magic that we are already using, rather than add another layer.
+For all the transactions to be properly routed, the only thing you need
+to remember is to use the following pattern:
+
+::
+
+    const (
+      NameCoin = "coin"
+      TypeSend = NameCoin + "/send"
+    )
+
+Permissions
+-----------
+
+TODO: replaces perms with object capabilities/object capability keys
+- get rid of IPC
+
+IPC requires a more complex permissioning system to allow the modules to
+have limited access to each other and also to allow more types of
+permissions than simple public key signatures. Rather than just use an
+address to identify who is performing an action, we can use a more
+complex structure:
+
+::
+
+    type Actor struct {
+      ChainID string     `json:"chain"` // this is empty unless it comes from a different chain
+      App     string     `json:"app"`   // the app that the actor belongs to
+      Address data.Bytes `json:"addr"`  // arbitrary app-specific unique id
+    }
+
+Here, the ``Actor`` abstracts any address that can authorize actions,
+hold funds, or initiate any sort of transaction. It doesn't just have to
+be a pubkey on this chain, it could stem from another app (such as
+multi-sig account), or even another chain (via IBC)
+
+``ChainID`` is for IBC, discussed below. Let's focus on ``App`` and
+``Address``. For a signature, the App is ``auth``, and any modules can
+check to see if a specific public key address signed like this
+``ctx.HasPermission(auth.SigPerm(addr))``. 
However, we can also
+authorize a tx with ``roles``, which handles multi-sig accounts, it
+checks if there were enough signatures by checking as above, then it can
+add the role permission like
+``ctx= ctx.WithPermissions(NewPerm(assume.Role))``
+
+In addition to the permissions schema, the Actors are addresses just
+like public key addresses. So one can create a multi-sig role, then send
+coin there, which can only be moved upon meeting the authorization
+requirements from that module. ``coin`` doesn't even know the existence
+of ``roles`` and one could build any other sort of module to provide
+permissions (like bind the outcome of an election to move coins or to
+modify the accounts on a role).
+
+One idea - not yet implemented - is to provide scopes on the
+permissions. Currently, if I sign a transaction to one module, it can
+pass it on to any other module over IPC with the same permissions. It
+could move coins, vote in an election, or anything else. Ideally, when
+signing, one could also specify the scope(s) that this signature
+authorizes. The `oauth
+protocol `__ also has to deal
+with a similar problem, and maybe could provide some inspiration.
diff --git a/docs/sdk/install.rst b/docs/sdk/install.rst
new file mode 100644
index 0000000000..4857f63e79
--- /dev/null
+++ b/docs/sdk/install.rst
@@ -0,0 +1,35 @@
+Install
+=======
+
+If you aren't used to compiling Go programs and just want the released
+version of the code, please head to our
+`downloads `__ page to get a
+pre-compiled binary for your platform.
+
+Usually, Cosmos SDK can be installed like a normal Go program:
+
+::
+
+    go get -u github.com/cosmos/cosmos-sdk
+
+If the dependencies have been updated with breaking changes, or if
+another branch is required, ``glide`` is used for dependency management. 
+Thus, assuming you've already run ``go get`` or otherwise cloned the +repo, the correct way to install is: + +:: + + cd $GOPATH/src/github.com/cosmos/cosmos-sdk + git pull origin master + make all + +This will create the ``basecoin`` binary in ``$GOPATH/bin``. +``make all`` implies ``make get_vendor_deps`` and uses ``glide`` to +install the correct version of all dependencies. It also tests the code, +including some cli tests to make sure your binary behaves properly. + +If you need another branch, make sure to run ``git checkout `` +before ``make all``. And if you switch branches a lot, especially +touching other tendermint repos, you may need to ``make fresh`` +sometimes so glide doesn't get confused with all the branches and +versions lying around. diff --git a/docs/sdk/overview.rst b/docs/sdk/overview.rst new file mode 100644 index 0000000000..d7639d4943 --- /dev/null +++ b/docs/sdk/overview.rst @@ -0,0 +1,435 @@ +Overview +======== + +The SDK design optimizes flexibility and security. The +framework is designed around a modular execution stack which allows +applications to mix and match elements as desired. In addition, +all modules are sandboxed for greater application security. + +Framework Overview +------------------ + +Object-Capability Model +~~~~~~~~~~~~~~~~~~~~~~~ + +When thinking about security, it's good to start with a specific threat model. Our threat model is the following: + +:: + + We assume that a thriving ecosystem of Cosmos-SDK modules that are easy to compose into a blockchain application will contain faulty or malicious modules. + +The Cosmos-SDK is designed to address this threat by being the foundation of an object capability system. + +:: + + The structural properties of object capability systems favor + modularity in code design and ensure reliable encapsulation in + code implementation. + + These structural properties facilitate the analysis of some + security properties of an object-capability program or operating + system. 
Some of these — in particular, information flow properties + — can be analyzed at the level of object references and + connectivity, independent of any knowledge or analysis of the code + that determines the behavior of the objects. As a consequence, + these security properties can be established and maintained in the + presence of new objects that contain unknown and possibly + malicious code. + + These structural properties stem from the two rules governing + access to existing objects: + + 1) An object A can send a message to B only if object A holds a + reference to B. + + 2) An object A can obtain a reference to C only + if object A receives a message containing a reference to C. As a + consequence of these two rules, an object can obtain a reference + to another object only through a preexisting chain of references. + In short, "Only connectivity begets connectivity." + +See the `wikipedia article `__ for more information. + +Strictly speaking, Golang does not implement object capabilities completely, because of several issues: + +* pervasive ability to import primitive modules (e.g. "unsafe", "os") +* pervasive ability to override module vars https://github.com/golang/go/issues/23161 +* data-race vulnerability where 2+ goroutines can create illegal interface values + +The first is easy to catch by auditing imports and using a proper dependency version control system like Glide. The second and third are unfortunate but it can be audited with some cost. + +Perhaps `Go2 will implement the object capability model `__. + +What does it look like? +^^^^^^^^^^^^^^^^^^^^^^^ + +Only reveal what is necessary to get the work done. 
+ +For example, the following code snippet violates the object capabilities principle: + +:: + + type AppAccount struct {...} + var account := &AppAccount{ + Address: pub.Address(), + Coins: sdk.Coins{{"ATM", 100}}, + } + var sumValue := externalModule.ComputeSumValue(account) + +The method "ComputeSumValue" implies a pure function, yet the implied capability of accepting a pointer value is the capability to modify that value. The preferred method signature should take a copy instead. + +:: + + var sumValue := externalModule.ComputeSumValue(*account) + +In the Cosmos SDK, you can see the application of this principle in the basecoin examples folder. + +:: + + // File: cosmos-sdk/examples/basecoin/app/init_handlers.go + package app + + import ( + "github.com/cosmos/cosmos-sdk/x/bank" + "github.com/cosmos/cosmos-sdk/x/sketchy" + ) + + func (app *BasecoinApp) initRouterHandlers() { + + // All handlers must be added here. + // The order matters. + app.router.AddRoute("bank", bank.NewHandler(app.accountMapper)) + app.router.AddRoute("sketchy", sketchy.NewHandler()) + } + +In the Basecoin example, the sketchy handler isn't provided an account mapper, which does provide the bank handler with the capability (in conjunction with the context of a transaction run). + +Security Overview +----------------- + +For examples, see the `examples `__ directory. + +Design Goals +~~~~~~~~~~~~ + +The design of the Cosmos SDK is based on the principles of "capabilities systems". + +Capabilities systems +~~~~~~~~~~~~~~~~~~~~ + +TODO: + +* Need for module isolation +* Capability is implied permission +* Link to thesis + +Tx & Msg +~~~~~~~~ + +The SDK distinguishes between transactions (Tx) and messages +(Msg). A Tx is a Msg wrapped with authentication and fee data. + +Messages +^^^^^^^^ + +Users can create messages containing arbitrary information by +implementing the ``Msg`` interface: + +:: + + type Msg interface { + + // Return the message type. + // Must be alphanumeric or empty. 
+    Type() string
+
+    // Get some property of the Msg.
+    Get(key interface{}) (value interface{})
+
+    // Get the canonical byte representation of the Msg.
+    GetSignBytes() []byte
+
+    // ValidateBasic does a simple validation check that
+    // doesn't require access to any other information.
+    ValidateBasic() error
+
+    // Signers returns the addrs of signers that must sign.
+    // CONTRACT: All signatures must be present to be valid.
+    // CONTRACT: Returns addrs in some deterministic order.
+    GetSigners() []crypto.Address
+    }
+
+Messages must specify their type via the ``Type()`` method. The type should
+correspond to the message's handler, so there can be many messages with the same
+type.
+
+Messages must also specify how they are to be authenticated. The ``GetSigners()``
+method returns a list of addresses that must sign the message, while the
+``GetSignBytes()`` method returns the bytes that must be signed for a signature
+to be valid.
+
+Addresses in the SDK are arbitrary byte arrays that are hex-encoded when
+displayed as a string or rendered in JSON.
+
+Messages can specify basic self-consistency checks using the ``ValidateBasic()``
+method to enforce that message contents are well formed before any actual logic
+begins.
+
+Finally, messages can provide generic access to their contents via ``Get(key)``,
+but this is mostly for convenience and not type-safe.
+ +For instance, the ``Basecoin`` message types are defined in ``x/bank/tx.go``: + +:: + + type SendMsg struct { + Inputs []Input `json:"inputs"` + Outputs []Output `json:"outputs"` + } + + type IssueMsg struct { + Banker crypto.Address `json:"banker"` + Outputs []Output `json:"outputs"` + } + +Each specifies the addresses that must sign the message: + +:: + + func (msg SendMsg) GetSigners() []crypto.Address { + addrs := make([]crypto.Address, len(msg.Inputs)) + for i, in := range msg.Inputs { + addrs[i] = in.Address + } + return addrs + } + + func (msg IssueMsg) GetSigners() []crypto.Address { + return []crypto.Address{msg.Banker} + } + +Transactions +^^^^^^^^^^^^ + +A transaction is a message with additional information for authentication: + +:: + + type Tx interface { + + GetMsg() Msg + + // The address that pays the base fee for this message. The fee is + // deducted before the Msg is processed. + GetFeePayer() crypto.Address + + // Get the canonical byte representation of the Tx. + // Includes any signatures (or empty slots). + GetTxBytes() []byte + + // Signatures returns the signature of signers who signed the Msg. + // CONTRACT: Length returned is same as length of + // pubkeys returned from MsgKeySigners, and the order + // matches. + // CONTRACT: If the signature is missing (ie the Msg is + // invalid), then the corresponding signature is + // .Empty(). + GetSignatures() []StdSignature + } + +The ``tx.GetSignatures()`` method returns a list of signatures, which must match +the list of addresses returned by ``tx.Msg.GetSigners()``. The signatures come in +a standard form: + +:: + + type StdSignature struct { + crypto.PubKey // optional + crypto.Signature + Sequence int64 + } + +It contains the signature itself, as well as the corresponding account's +sequence number. The sequence number is expected to increment every time a +message is signed by a given account. This prevents "replay attacks", where +the same message could be executed over and over again. 
+
+The ``StdSignature`` can also optionally include the public key for verifying the
+signature. An application can store the public key for each address it knows
+about, making it optional to include the public key in the transaction. In the
+case of Basecoin, the public key only needs to be included in the first
+transaction sent by a given account - after that, the public key is forever
+stored by the application and can be left out of transactions.
+
+Transactions can also specify the address responsible for paying the
+transaction's fees using the ``tx.GetFeePayer()`` method.
+
+The standard way to create a transaction from a message is to use the ``StdTx``:
+
+::
+
+    type StdTx struct {
+        Msg
+        Signatures []StdSignature
+    }
+
+Encoding and Decoding Transactions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Messages and transactions are designed to be generic enough for developers to
+specify their own encoding schemes. This enables the SDK to be used as the
+framework for constructing already specified cryptocurrency state machines, for
+instance Ethereum.
+
+When initializing an application, a developer must specify a ``TxDecoder``
+function which determines how an arbitrary byte array should be unmarshalled
+into a ``Tx``:
+
+::
+
+    type TxDecoder func(txBytes []byte) (Tx, error)
+
+In ``Basecoin``, we use the Tendermint wire format and the ``go-wire`` library for
+encoding and decoding all message types. The ``go-wire`` library has the nice
+property that it can unmarshal into interface types, but it requires the
+relevant types to be registered ahead of time. Registration happens on a
+``Codec`` object, so as not to taint the global name space.
+
+For instance, in ``Basecoin``, we wish to register the ``SendMsg`` and ``IssueMsg``
+types:
+
+::
+
+    cdc.RegisterInterface((*sdk.Msg)(nil), nil)
+    cdc.RegisterConcrete(bank.SendMsg{}, "cosmos-sdk/SendMsg", nil)
+    cdc.RegisterConcrete(bank.IssueMsg{}, "cosmos-sdk/IssueMsg", nil)
+
+Note how each concrete type is given a name - these names determine the type's
+unique "prefix bytes" during encoding. A registered type will always use the
+same prefix-bytes, regardless of what interface it is satisfying. For more
+details, see the `go-wire documentation `__.
+
+
+MultiStore
+~~~~~~~~~~
+
+MultiStore is like a filesystem
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Mounting an IAVLStore
+^^^^^^^^^^^^^^^^^^^^^
+
+TODO:
+
+* IAVLStore: Fast balanced dynamic Merkle store.
+
+  * supports iteration.
+
+* MultiStore: multiple Merkle tree backends in a single store
+
+  * allows using Ethereum Patricia Trie and Tendermint IAVL in same app
+
+* Provide caching for intermediate state during execution of blocks and transactions (including for iteration)
+* Historical state pruning and snapshotting.
+* Query proofs (existence, absence, range, etc.) on current and retained historical state.
+
+Context
+-------
+
+The SDK uses a ``Context`` to propagate common information across functions. The
+``Context`` is modelled after the Golang ``context.Context`` object, which has
+become ubiquitous in networking middleware and routing applications as a means
+to easily propagate request context through handler functions.
+
+The main information stored in the ``Context`` includes the application
+MultiStore (see below), the last block header, and the transaction bytes.
+Effectively, the context contains all data that may be necessary for processing
+a transaction.
+
+Many methods on SDK objects receive a context as the first argument.
+ +Handler +------- + +Transaction processing in the SDK is defined through ``Handler`` functions: + +:: + + type Handler func(ctx Context, tx Tx) Result + +A handler takes a context and a transaction and returns a result. All +information necessary for processing a transaction should be available in the +context. + +While the context holds the entire application state (all referenced from the +root MultiStore), a particular handler only needs a particular kind of access +to a particular store (or two or more). Access to stores is managed using +capabilities keys and mappers. When a handler is initialized, it is passed a +key or mapper that gives it access to the relevant stores. + +:: + + // File: cosmos-sdk/examples/basecoin/app/init_stores.go + app.BaseApp.MountStore(app.capKeyMainStore, sdk.StoreTypeIAVL) + app.accountMapper = auth.NewAccountMapper( + app.capKeyMainStore, // target store + &types.AppAccount{}, // prototype + ) + + // File: cosmos-sdk/examples/basecoin/app/init_handlers.go + app.router.AddRoute("bank", bank.NewHandler(app.accountMapper)) + + // File: cosmos-sdk/x/bank/handler.go + // NOTE: Technically, NewHandler only needs a CoinMapper + func NewHandler(am sdk.AccountMapper) sdk.Handler { + return func(ctx sdk.Context, msg sdk.Msg) sdk.Result { + cm := CoinMapper{am} + ... + } + } + +AnteHandler +----------- + +Handling Fee payment +~~~~~~~~~~~~~~~~~~~~ + +Handling Authentication +~~~~~~~~~~~~~~~~~~~~~~~ + +Accounts and x/auth +------------------- + +sdk.Account +~~~~~~~~~~~ + +auth.BaseAccount +~~~~~~~~~~~~~~~~ + +auth.AccountMapper +~~~~~~~~~~~~~~~~~~ + +Wire codec +---------- + +Why another codec? +~~~~~~~~~~~~~~~~~~ + +vs encoding/json +~~~~~~~~~~~~~~~~ + +vs protobuf +~~~~~~~~~~~ + +Dummy example +------------- + +Basecoin example +---------------- + +The quintessential SDK application is Basecoin - a simple +multi-asset cryptocurrency. 
Basecoin consists of a set of
+accounts stored in a Merkle tree, where each account may have
+many coins. There are two message types: SendMsg and IssueMsg.
+SendMsg allows coins to be sent around, while IssueMsg allows a
+set of predefined users to issue new coins.
diff --git a/docs/staking/intro.rst b/docs/staking/intro.rst
new file mode 100644
index 0000000000..ca1d002f57
--- /dev/null
+++ b/docs/staking/intro.rst
@@ -0,0 +1,270 @@
+Using Gaia
+==========
+
+This project is a demonstration of the Cosmos Hub with staking functionality; it is
+designed to get validators acquainted with staking concepts and procedures.
+
+Potential validators will be declaring their candidacy, after which users can
+delegate and, if they so wish, unbond. This can be practiced using a local or
+public testnet.
+
+Install
+-------
+
+The ``gaia`` tooling is an extension of the Cosmos-SDK; to install:
+
+::
+
+    go get github.com/cosmos/gaia
+    cd $GOPATH/src/github.com/cosmos/gaia
+    make get_vendor_deps
+    make install
+
+It has three primary commands:
+
+::
+
+  Available Commands:
+    node        The Cosmos Network delegation-game blockchain test
+    rest-server REST client for gaia commands
+    client      Gaia light client
+
+    version     Show version info
+    help        Help about any command
+
+and a handful of flags that are highlighted only as necessary.
+
+The ``gaia node`` command is a proxy for running a tendermint node. You'll be using
+this command to either initialize a new node, or - using existing files - join
+the testnet.
+
+The ``gaia rest-server`` command is used by the `cosmos UI `__.
+
+Lastly, the ``gaia client`` command is the workhorse of the staking module. It allows
+for sending various transactions and other types of interaction with a running chain
+that you've set up or joined.
+
+Generating Keys
+---------------
+
+Review the `key management tutorial <../key-management.html>`__ and create one key
+if you'll be joining the public testnet, and three keys if you'll be trying out a local
+testnet.
+
+Setup Testnet
+-------------
+
+The first thing you'll want to do is either `create a local testnet <./local-testnet.html>`__ or
+join a `public testnet <./public-testnet.html>`__. Either step is required before proceeding.
+
+The rest of this tutorial will assume a local testnet with three participants: ``alice`` will be
+the initial validator, ``bob`` will first receive tokens from ``alice`` and then declare candidacy
+as a validator, and ``charlie`` will bond then unbond to ``bob``. If you're joining the public
+testnet, the token amounts will need to be adjusted.
+
+Sending Tokens
+--------------
+
+We'll have ``alice``, who is currently quite rich, send some ``fermions`` to ``bob``:
+
+::
+
+    gaia client tx send --amount=1000fermion --sequence=1 --name=alice --to=5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6
+
+where the ``--sequence`` flag is to be incremented for each transaction, the ``--name`` flag names the sender, and the ``--to`` flag takes ``bob``'s address. You'll see something like:
+
+::
+
+    Please enter passphrase for alice:
+    {
+      "check_tx": {
+        "gas": 30
+      },
+      "deliver_tx": {
+        "tags": [
+          {
+            "key": "height",
+            "value_type": 1,
+            "value_int": 2963
+          },
+          {
+            "key": "coin.sender",
+            "value_string": "5D93A6059B6592833CBC8FA3DA90EE0382198985"
+          },
+          {
+            "key": "coin.receiver",
+            "value_string": "5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6"
+          }
+        ]
+      },
+      "hash": "423BD7EA3C4B36AF8AFCCA381C0771F8A698BA77",
+      "height": 2963
+    }
+
+Check out ``bob``'s account, which should now have 992 fermions:
+
+::
+
+    gaia client query account 5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6
+
+Adding a Second Validator
+-------------------------
+
+Next, let's add the second node as a validator.
+ +First, we need the pub_key data: + +:: + + cat $HOME/.gaia2/priv_validator.json + +the first part will look like: + +:: + + {"address":"7B78527942C831E16907F10C3263D5ED933F7E99","pub_key":{"type":"ed25519","data":"96864CE7085B2E342B0F96F2E92B54B18C6CC700186238810D5AA7DFDAFDD3B2"}, + +and you want the ``pub_key`` ``data`` that starts with ``96864CE``. + +Now ``bob`` can declare candidacy to that pubkey: + +:: + + gaia client tx declare-candidacy --amount=10fermion --name=bob --pubkey= --moniker=bobby + +with an output like: + +:: + + Please enter passphrase for bob: + { + "check_tx": { + "gas": 30 + }, + "deliver_tx": {}, + "hash": "2A2A61FFBA1D7A59138E0068C82CC830E5103799", + "height": 4075 + } + + +We should see ``bob``'s account balance decrease by 10 fermions: + +:: + + gaia client query account 5D93A6059B6592833CBC8FA3DA90EE0382198985 + +To confirm for certain the new validator is active, ask the tendermint node: + +:: + + curl localhost:46657/validators + +If you now kill either node, blocks will stop streaming in, because +there aren't enough validators online. Turn it back on and they will +start streaming again. + +Now that ``bob`` has declared candidacy, which essentially bonded 10 fermions and made him a validator, we're going to get ``charlie`` to delegate some coins to ``bob``. + +Delegating +---------- + +First let's have ``alice`` send some coins to ``charlie``: + +:: + + gaia client tx send --amount=1000fermion --sequence=2 --name=alice --to=48F74F48281C89E5E4BE9092F735EA519768E8EF + +Then ``charlie`` will delegate some fermions to ``bob``: + +:: + + gaia client tx delegate --amount=10fermion --name=charlie --pubkey= + +You'll see output like: + +:: + + Please enter passphrase for charlie: + { + "check_tx": { + "gas": 30 + }, + "deliver_tx": {}, + "hash": "C3443BA30FCCC1F6E3A3D6AAAEE885244F8554F0", + "height": 51585 + } + +And that's it. You can query ``charlie``'s account to see the decrease in fermions. 
+
+To get more information about the candidate, try:
+
+::
+
+    gaia client query candidate --pubkey=
+
+and you'll see output similar to:
+
+::
+
+    {
+      "height": 51899,
+      "data": {
+        "pub_key": {
+          "type": "ed25519",
+          "data": "52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B"
+        },
+        "owner": {
+          "chain": "",
+          "app": "sigs",
+          "addr": "5A35E4CC7B7DC0A5CB49CEA91763213A9AE92AD6"
+        },
+        "shares": 20,
+        "voting_power": 20,
+        "description": {
+          "moniker": "bobby",
+          "identity": "",
+          "website": "",
+          "details": ""
+        }
+      }
+    }
+
+It's also possible to query the delegator's bond like so:
+
+::
+
+    gaia client query delegator-bond --delegator-address 48F74F48281C89E5E4BE9092F735EA519768E8EF --pubkey 52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B
+
+with an output similar to:
+
+::
+
+    {
+      "height": 325782,
+      "data": {
+        "PubKey": {
+          "type": "ed25519",
+          "data": "52D6FCD8C92A97F7CCB01205ADF310A18411EA8FDCC10E65BF2FCDB05AD1689B"
+        },
+        "Shares": 20
+      }
+    }
+
+
+where the ``--delegator-address`` is ``charlie``'s address and the ``--pubkey`` is the same as we've been using.
+
+
+Unbonding
+---------
+
+Finally, to relinquish your voting power, unbond some coins. You should see
+your VotingPower reduce and your account balance increase.
+
+::
+
+    gaia client tx unbond --amount=5fermion --name=charlie --pubkey=
+    gaia client query account 48F74F48281C89E5E4BE9092F735EA519768E8EF
+
+See the bond decrease with ``gaia client query delegator-bond`` like above.
+
+That concludes an overview of the ``gaia`` tooling for local testing.
diff --git a/docs/staking/key-management.rst b/docs/staking/key-management.rst
new file mode 100644
index 0000000000..ebeca0e445
--- /dev/null
+++ b/docs/staking/key-management.rst
@@ -0,0 +1,204 @@
+Key Management
+==============
+
+Here we explain a bit how to work with your keys, using the
+``gaia client keys`` subcommand.
+
+**Note:** This keys tooling is not considered production ready and is
+for dev only.
+ +We'll look at what you can do using the six sub-commands of +``gaia client keys``: + +:: + + new + list + get + delete + recover + update + +Create keys +----------- + +``gaia client keys new`` has two inputs (name, password) and two outputs +(address, seed). + +First, we name our key: + +:: + + gaia client keys new alice + +This will prompt (10 character minimum) password entry which must be +re-typed. You'll see: + +:: + + Enter a passphrase: + Repeat the passphrase: + alice A159C96AE911F68913E715ED889D211C02EC7D70 + **Important** write this seed phrase in a safe place. + It is the only way to recover your account if you ever forget your password. + + pelican amateur empower assist awkward claim brave process cliff save album pigeon intact asset + +which shows the address of your key named ``alice``, and its recovery +seed. We'll use these shortly. + +Adding the ``--output json`` flag to the above command would give this +output: + +:: + + Enter a passphrase: + Repeat the passphrase: + { + "key": { + "name": "alice", + "address": "A159C96AE911F68913E715ED889D211C02EC7D70", + "pubkey": { + "type": "ed25519", + "data": "4BF22554B0F0BF2181187E5E5456E3BF3D96DB4C416A91F07F03A9C36F712B77" + } + }, + "seed": "pelican amateur empower assist awkward claim brave process cliff save album pigeon intact asset" + } + +To avoid the prompt, it's possible to pipe the password into the +command, e.g.: + +:: + + echo 1234567890 | gaia client keys new fred --output json + +After trying each of the three ways to create a key, look at them, use: + +:: + + gaia client keys list + +to list all the keys: + +:: + + All keys: + alice 6FEA9C99E2565B44FCC3C539A293A1378CDA7609 + bob A159C96AE911F68913E715ED889D211C02EC7D70 + charlie 784D623E0C15DE79043C126FA6449B68311339E5 + +Again, we can use the ``--output json`` flag: + +:: + + [ + { + "name": "alice", + "address": "6FEA9C99E2565B44FCC3C539A293A1378CDA7609", + "pubkey": { + "type": "ed25519", + "data": 
"878B297F1E863CC30CAD71E04A8B3C23DB71C18F449F39E35B954EDB2276D32D"
+      }
+    },
+    {
+      "name": "bob",
+      "address": "A159C96AE911F68913E715ED889D211C02EC7D70",
+      "pubkey": {
+        "type": "ed25519",
+        "data": "2127CAAB96C08E3042C5B33C8B5A820079AAE8DD50642DCFCC1E8B74821B2BB9"
+      }
+    },
+    {
+      "name": "charlie",
+      "address": "784D623E0C15DE79043C126FA6449B68311339E5",
+      "pubkey": {
+        "type": "ed25519",
+        "data": "4BF22554B0F0BF2181187E5E5456E3BF3D96DB4C416A91F07F03A9C36F712B77"
+      }
+    }
+  ]
+
+to get machine readable output.
+
+If we want information about one specific key, then:
+
+::
+
+    gaia client keys get charlie --output json
+
+will, for example, return the info for only the "charlie" key returned
+from the previous ``gaia client keys list`` command.
+
+The keys tooling can support different types of keys with a flag:
+
+::
+
+    gaia client keys new bit --type secp256k1
+
+and you'll see the difference in the ``"type":`` field from ``gaia client
+keys get``
+
+Before moving on, let's set an environment variable to make
+``--output json`` the default.
+
+Either run or put in your ``~/.bash_profile`` the following line:
+
+::
+
+    export BC_OUTPUT=json
+
+Recover a key
+-------------
+
+Let's say, for whatever reason, you lose a key or forget the password.
+On creation, you were given a seed. We'll use it to recover a lost key.
+
+First, let's simulate the loss by deleting a key:
+
+::
+
+    gaia client keys delete alice
+
+which prompts for your current password, now rendered obsolete, and
+gives a warning message. The only way you can recover your key now is
+using the 12 word seed given on initial creation of the key. Let's try
+it:
+
+::
+
+    gaia client keys recover alice-again
+
+which prompts for a new password then the seed:
+
+::
+
+    Enter the new passphrase:
+    Enter your recovery seed phrase:
+    strike alien praise vendor term left market practice junior better deputy divert front calm
+    alice-again CBF5D9CE6DDCC32806162979495D07B851C53451
+
+and voila!
You've recovered your key. Note that the seed can be typed +out, pasted in, or piped into the command alongside the password. + +To change the password of a key, we can: + +:: + + gaia client keys update alice-again + +and follow the prompts. + +That covers most features of the keys sub command. + +.. raw:: html + + diff --git a/docs/staking/local-testnet.rst b/docs/staking/local-testnet.rst new file mode 100644 index 0000000000..e3f69bded1 --- /dev/null +++ b/docs/staking/local-testnet.rst @@ -0,0 +1,83 @@ +Local Testnet +============= + +This tutorial demonstrates the basics of setting up a gaia +testnet locally. + +If you haven't already made a key, make one now: + +:: + + gaia client keys new alice + +otherwise, use an existing key. + +Initialize The Chain +-------------------- + +Now initialize a gaia chain, using ``alice``'s address: + +:: + + gaia node init 5D93A6059B6592833CBC8FA3DA90EE0382198985 --home=$HOME/.gaia1 --chain-id=gaia-test + +This will create all the files necessary to run a single node chain in +``$HOME/.gaia1``: a ``priv_validator.json`` file with the validators +private key, and a ``genesis.json`` file with the list of validators and +accounts. + +We'll add a second node on our local machine by initiating a node in a +new directory, with the same address, and copying in the genesis: + +:: + + gaia node init 5D93A6059B6592833CBC8FA3DA90EE0382198985 --home=$HOME/.gaia2 --chain-id=gaia-test + cp $HOME/.gaia1/genesis.json $HOME/.gaia2/genesis.json + +We also need to modify ``$HOME/.gaia2/config.toml`` to set new seeds +and ports. 
It should look like:
+
+::
+
+    proxy_app = "tcp://127.0.0.1:46668"
+    moniker = "anonymous"
+    fast_sync = true
+    db_backend = "leveldb"
+    log_level = "state:info,*:error"
+
+    [rpc]
+    laddr = "tcp://0.0.0.0:46667"
+
+    [p2p]
+    laddr = "tcp://0.0.0.0:46666"
+    seeds = "0.0.0.0:46656"
+
+Start Nodes
+-----------
+
+Now that we've initialized the chains, we can start both nodes:
+
+NOTE: each command below must be started in separate terminal windows. Alternatively, to run this testnet across multiple machines, you'd replace the ``seeds = "0.0.0.0"`` in ``~/.gaia2.config.toml`` with the IP of the first node, and could skip the modifications we made to the config file above because port conflicts would be avoided.
+
+::
+
+    gaia node start --home=$HOME/.gaia1
+    gaia node start --home=$HOME/.gaia2
+
+Now we can initialize a client for the first node, and look up our
+account:
+
+::
+
+    gaia client init --chain-id=gaia-test --node=tcp://localhost:46657
+    gaia client query account 5D93A6059B6592833CBC8FA3DA90EE0382198985
+
+To see what tendermint considers the validator set to be, use:
+
+::
+
+    curl localhost:46657/validators
+
+and compare the information in this file: ``~/.gaia1/priv_validator.json``. The ``address`` and ``pub_key`` fields should match.
+
+To add a second validator on your testnet, you'll need to bond some tokens by declaring candidacy.
diff --git a/docs/staking/public-testnet.rst b/docs/staking/public-testnet.rst
new file mode 100644
index 0000000000..440a084499
--- /dev/null
+++ b/docs/staking/public-testnet.rst
@@ -0,0 +1,64 @@
+Public Testnets
+===============
+
+Here we'll cover the basics of joining a public testnet. These testnets
+come and go with various names as we release new versions of tendermint
+core. This tutorial covers joining the ``gaia-1`` testnet. To join
+other testnets, choose different initialization files, described below.
+
+Get Tokens
+----------
+
+If you haven't already `created a key <../key-management.html>`__,
+do so now.
Copy your key's address and enter it into
+`this utility `__ which will send you
+some ``fermion`` testnet tokens.
+
+Get Files
+---------
+
+Now, to sync with the testnet, we need the genesis file and seeds. The
+easiest way to get them is to clone and navigate to the tendermint
+testnet repo:
+
+::
+
+    git clone https://github.com/tendermint/testnets ~/testnets
+    cd ~/testnets/gaia-1/gaia
+
+NOTE: to join a different testnet, change the ``gaia-1/gaia`` filepath
+to another directory with testnet initialization files *and* an
+active testnet.
+
+Start Node
+----------
+
+Now we can start a new node; it may take a while to sync with the
+existing testnet.
+
+::
+
+    gaia node start --home=$HOME/testnets/gaia-1/gaia
+
+Once blocks slow down to about one per second, you're all caught up.
+
+The ``gaia node start`` command will automatically generate a validator
+private key found in ``~/testnets/gaia-1/gaia/priv_validator.json``.
+
+Finally, let's initialize the gaia client to interact with the testnet:
+
+::
+
+    gaia client init --chain-id=gaia-1 --node=tcp://localhost:46657
+
+and check our balance:
+
+::
+
+    gaia client query account $MYADDR
+
+Where ``$MYADDR`` is the address originally generated by ``gaia keys new bob``.
+
+You are now ready to declare candidacy or delegate some fermions. See the
+`staking module overview <./staking-module.html>`__ for more information
+on using the ``gaia client``.
diff --git a/docs/x/replay-protection.rst b/docs/x/replay-protection.rst
new file mode 100644
index 0000000000..d262add974
--- /dev/null
+++ b/docs/x/replay-protection.rst
@@ -0,0 +1,38 @@
+Replay Protection
+-----------------
+
+In order to prevent `replay
+attacks `__ a multi account
+nonce system has been constructed as a module, which can be found in
+``modules/nonce``. By adding the nonce module to the stack, each
+transaction is verified for authenticity against replay attacks.
This is
+achieved by requiring a new signed copy of the sequence number,
+which must be exactly 1 greater than the sequence number of the previous
+transaction. A distinct sequence number is assigned per chain-id,
+application, and group of signers. Each sequence number is tracked as a
+nonce-store entry where the key is the marshaled list of actors after
+having been sorted by chain, app, and address.
+
+.. code:: golang
+
+    // Tx - Nonce transaction structure, contains list of signers and current sequence number
+    type Tx struct {
+        Sequence uint32           `json:"sequence"`
+        Signers  []basecoin.Actor `json:"signers"`
+        Tx       basecoin.Tx      `json:"tx"`
+    }
+
+By distinguishing sequence numbers across groups of Signers,
+multi-signature Actors need not lock up use of their Address while
+waiting for all the members of a multi-sig transaction to occur. Instead
+only the multi-sig account will be locked, while other accounts
+belonging to that signer can be used and signed with other sequence
+numbers.
+
+By abstracting out the nonce module in the stack, entire series of
+transactions can occur without needing to verify the nonce for each
+member of the series. A common example is a stack which will send coins
+and charge a fee. Within the SDK this can be achieved using separate
+modules in a stack, one to send the coins and the other to charge the
+fee; however, neither module needs to check the nonce. This can occur
+as a separate module earlier in the stack.
diff --git a/publish/basecoin-builder/Dockerfile b/publish/basecoin-builder/Dockerfile
deleted file mode 100644
index 28726034b7..0000000000
--- a/publish/basecoin-builder/Dockerfile
+++ /dev/null
@@ -1,12 +0,0 @@
-FROM golang:1.7.4
-
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    zip \
-    && rm -rf /var/lib/apt/lists/*
-
-# We want to ensure that release builds never have any cgo dependencies so we
-# switch that off at the highest level.
-ENV CGO_ENABLED 0 - -RUN mkdir -p $GOPATH/src/github.com/tendermint/basecoin -WORKDIR $GOPATH/src/github.com/tendermint/basecoin diff --git a/publish/dist.sh b/publish/dist.sh deleted file mode 100755 index 6f488477bc..0000000000 --- a/publish/dist.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -set -e - -REPO_NAME="basecoin" - -# Get the version from the environment, or try to figure it out. -if [ -z $VERSION ]; then - VERSION=$(awk -F\" '/Version =/ { print $2; exit }' < version/version.go) -fi -if [ -z "$VERSION" ]; then - echo "Please specify a version." - exit 1 -fi -echo "==> Building version $VERSION..." - -# Get the parent directory of where this script is. -SOURCE="${BASH_SOURCE[0]}" -while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done -DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )" - -# Change into that dir because we expect that. -cd "$DIR" - -# Delete the old dir -echo "==> Removing old directory..." -rm -rf build/pkg -mkdir -p build/pkg - -# Do a hermetic build inside a Docker container. -docker build -t tendermint/${REPO_NAME}-builder scripts/${REPO_NAME}-builder/ -docker run --rm -e "BUILD_TAGS=$BUILD_TAGS" -v "$(pwd)":/go/src/github.com/tendermint/${REPO_NAME} tendermint/${REPO_NAME}-builder ./scripts/dist_build.sh - -# Add $REPO_NAME and $VERSION prefix to package name. -rm -rf ./build/dist -mkdir -p ./build/dist -for FILENAME in $(find ./build/pkg -mindepth 1 -maxdepth 1 -type f); do - FILENAME=$(basename "$FILENAME") - cp "./build/pkg/${FILENAME}" "./build/dist/${REPO_NAME}_${VERSION}_${FILENAME}" -done - -# Make the checksums. 
-pushd ./build/dist -shasum -a256 ./* > "./${REPO_NAME}_${VERSION}_SHA256SUMS" -popd - -# Done -echo -echo "==> Results:" -ls -hl ./build/dist - -exit 0 diff --git a/publish/dist_build.sh b/publish/dist_build.sh deleted file mode 100755 index 036ee7735c..0000000000 --- a/publish/dist_build.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env bash -set -e - -# Get the parent directory of where this script is. -SOURCE="${BASH_SOURCE[0]}" -while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done -DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )" - -# Change into that dir because we expect that. -cd "$DIR" - -# Get the git commit -GIT_COMMIT="$(git rev-parse --short HEAD)" -GIT_DESCRIBE="$(git describe --tags --always)" -GIT_IMPORT="github.com/tendermint/basecoin/version" - -# Determine the arch/os combos we're building for -XC_ARCH=${XC_ARCH:-"386 amd64 arm"} -XC_OS=${XC_OS:-"solaris darwin freebsd linux windows"} - -# Make sure build tools are available. -make tools - -# Get VENDORED dependencies -make get_vendor_deps - -# Build! -echo "==> Building basecoin..." -"$(which gox)" \ - -os="${XC_OS}" \ - -arch="${XC_ARCH}" \ - -osarch="!darwin/arm !solaris/amd64 !freebsd/amd64" \ - -ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \ - -output "build/pkg/{{.OS}}_{{.Arch}}/basecoin" \ - -tags="${BUILD_TAGS}" \ - github.com/tendermint/basecoin/cmd/basecoin - -echo "==> Building basecli..." -"$(which gox)" \ - -os="${XC_OS}" \ - -arch="${XC_ARCH}" \ - -osarch="!darwin/arm !solaris/amd64 !freebsd/amd64" \ - -ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \ - -output "build/pkg/{{.OS}}_{{.Arch}}/basecli" \ - -tags="${BUILD_TAGS}" \ - github.com/tendermint/basecoin/cmd/basecli - -# Zip all the files. -echo "==> Packaging..." 
-for PLATFORM in $(find ./build/pkg -mindepth 1 -maxdepth 1 -type d); do - OSARCH=$(basename "${PLATFORM}") - echo "--> ${OSARCH}" - - pushd "$PLATFORM" >/dev/null 2>&1 - zip "../${OSARCH}.zip" ./* - popd >/dev/null 2>&1 -done - - -exit 0 diff --git a/publish/print_test_account.go b/publish/print_test_account.go deleted file mode 100644 index f8e3fe66ab..0000000000 --- a/publish/print_test_account.go +++ /dev/null @@ -1,23 +0,0 @@ -// +build scripts - -package main - -import ( - "fmt" - - "github.com/cosmos/cosmos-sdk/tests" - "github.com/tendermint/go-wire" -) - -/* -PrivKey: 019F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A0867D3B5EAF0C0BF6B5A602D359DAECC86A7A74053490EC37AE08E71360587C870 -PubKey: 0167D3B5EAF0C0BF6B5A602D359DAECC86A7A74053490EC37AE08E71360587C870 -Address: D9B727742AA29FA638DC63D70813C976014C4CE0 -*/ -func main() { - tAcc := tests.PrivAccountFromSecret("test") - fmt.Printf("PrivKey:%X\n", tAcc.PrivKey.Bytes()) - fmt.Printf("PubKey:%X\n", tAcc.Account.PubKey.Bytes()) - fmt.Printf("Address:%X\n", tAcc.Account.PubKey.Address()) - fmt.Println(string(wire.JSONBytesPretty(tAcc))) -} diff --git a/publish/publish.sh b/publish/publish.sh deleted file mode 100644 index 738dbdd415..0000000000 --- a/publish/publish.sh +++ /dev/null @@ -1,7 +0,0 @@ -#! /bin/bash - -# Get the version from the environment, or try to figure it out. 
-if [ -z $VERSION ]; then - VERSION=$(awk -F\" '/Version =/ { print $2; exit }' < version/version.go) -fi -aws s3 cp --recursive build/dist s3://tendermint/binaries/basecoin/v${VERSION} --acl public-read diff --git a/tests/cli/common.sh b/tests/cli/common.sh deleted file mode 100644 index aaeaca500c..0000000000 --- a/tests/cli/common.sh +++ /dev/null @@ -1,260 +0,0 @@ -# This is not executable, but helper functions for the other scripts - -# XXX XXX XXX XXX XXX -# The following global variables must be defined before calling common functions: -# SERVER_EXE=foobar # Server binary name -# CLIENT_EXE=foobarcli # Client binary name -# ACCOUNTS=(foo bar) # List of accounts for initialization -# RICH=${ACCOUNTS[0]} # Account to assign genesis balance - - -# XXX Ex Usage: quickSetup $WORK_NAME $CHAIN_ID -# Desc: Start the program, use with shunit2 OneTimeSetUp() -quickSetup() { - # These are passed in as args - BASE_DIR=$HOME/$1 - CHAIN_ID=$2 - - rm -rf $BASE_DIR 2>/dev/null - mkdir -p $BASE_DIR - - # Set up client - make sure you use the proper prefix if you set - # a custom CLIENT_EXE - export BC_HOME=${BASE_DIR}/client - prepareClient - - # start basecoin server (with counter) - initServer $BASE_DIR $CHAIN_ID - if [ $? != 0 ]; then return 1; fi - - initClient $CHAIN_ID - if [ $? != 0 ]; then return 1; fi - - printf "...Testing may begin!\n\n\n" -} - -# XXX Ex Usage: quickTearDown -# Desc: close the test server, use with shunit2 OneTimeTearDown() -quickTearDown() { - printf "\n\nstopping $SERVER_EXE test server..." - kill -9 $PID_SERVER >/dev/null 2>&1 - sleep 1 -} - -############################################################ - -prepareClient() { - echo "Preparing client keys..." - ${CLIENT_EXE} reset_all - assertTrue "line=${LINENO}, prepare client" $? 
- - for i in "${!ACCOUNTS[@]}"; do - newKey ${ACCOUNTS[$i]} - done -} - -# XXX Ex Usage1: initServer $ROOTDIR $CHAINID -# XXX Ex Usage2: initServer $ROOTDIR $CHAINID $PORTPREFIX -# Desc: Grabs the Rich account and gives it all genesis money -# port-prefix default is 4665{6,7,8} -initServer() { - echo "Setting up genesis..." - SERVE_DIR=$1/server - assertNotNull "line=${LINENO}, no chain" $2 - CHAIN=$2 - SERVER_LOG=$1/${SERVER_EXE}.log - - GENKEY=$(${CLIENT_EXE} keys get ${RICH} | awk '{print $2}') - ${SERVER_EXE} init --static --chain-id $CHAIN $GENKEY --home=$SERVE_DIR >>$SERVER_LOG - - # optionally set the port - if [ -n "$3" ]; then - echo "setting port $3" - sed -ie "s/4665/$3/" $SERVE_DIR/config.toml - fi - - echo "Starting ${SERVER_EXE} server..." - startServer $SERVE_DIR $SERVER_LOG - return $? -} - -# XXX Ex Usage: startServer $SERVE_DIR $SERVER_LOG -startServer() { - ${SERVER_EXE} start --home=$1 >>$2 2>&1 & - sleep 5 - PID_SERVER=$! - disown - if ! ps $PID_SERVER >/dev/null; then - echo "**FAILED**" - cat $SERVER_LOG - return 1 - fi -} - -# XXX Ex Usage1: initClient $CHAINID -# XXX Ex Usage2: initClient $CHAINID $PORTPREFIX -# Desc: Initialize the client program -# port-prefix default is 46657 -initClient() { - echo "Attaching ${CLIENT_EXE} client..." - PORT=${2:-46657} - # hard-code the expected validator hash - ${CLIENT_EXE} init --chain-id=$1 --node=tcp://localhost:${PORT} --valhash=EB168E17E45BAEB194D4C79067FFECF345C64DE6 - assertTrue "line=${LINENO}, initialized light-client" $? -} - -# XXX Ex Usage1: newKey $NAME -# XXX Ex Usage2: newKey $NAME $PASSWORD -# Desc: Generates key for given username and password -newKey(){ - assertNotNull "line=${LINENO}, keyname required" "$1" - KEYPASS=${2:-qwertyuiop} - (echo $KEYPASS; echo $KEYPASS) | ${CLIENT_EXE} keys new $1 >/dev/null 2>/dev/null - assertTrue "line=${LINENO}, created $1" $? 
- assertTrue "line=${LINENO}, $1 doesn't exist" "${CLIENT_EXE} keys get $1" -} - -# XXX Ex Usage: getAddr $NAME -# Desc: Gets the address for a key name -getAddr() { - assertNotNull "line=${LINENO}, keyname required" "$1" - RAW=$(${CLIENT_EXE} keys get $1) - assertTrue "line=${LINENO}, no key for $1" $? - # print the addr - echo $RAW | cut -d' ' -f2 -} - -# XXX Ex Usage: checkAccount $ADDR $AMOUNT [$HEIGHT] -# Desc: Assumes just one coin, checks the balance of first coin in any case -# pass optional height to query which block to query -checkAccount() { - # default height of 0, but accept an argument - HEIGHT=${3:-0} - - # make sure sender goes down - ACCT=$(${CLIENT_EXE} query account $1 --height=$HEIGHT) - if ! assertTrue "line=${LINENO}, account must exist" $?; then - return 1 - fi - - if [ -n "$DEBUG" ]; then echo $ACCT; echo; fi - assertEquals "line=${LINENO}, proper money" "$2" $(echo $ACCT | jq .data.coins[0].amount) - return $? -} - -# XXX Ex Usage: checkRole $ROLE $SIGS $NUM_SIGNERS [$HEIGHT] -# Desc: Ensures this named role exists, and has the number of members and required signatures as above -checkRole() { - # default height of 0, but accept an argument - HEIGHT=${4:-0} - - # make sure sender goes down - QROLE=$(${CLIENT_EXE} query role $1 --height=$HEIGHT) - if ! assertTrue "line=${LINENO}, role must exist" $?; then - return 1 - fi - - if [ -n "$DEBUG" ]; then echo $QROLE; echo; fi - assertEquals "line=${LINENO}, proper sigs" "$2" $(echo $QROLE | jq .data.min_sigs) - assertEquals "line=${LINENO}, proper app" '"sigs"' $(echo $QROLE | jq '.data.signers[0].app' ) - assertEquals "line=${LINENO}, proper signers" "$3" $(echo $QROLE | jq '.data.signers | length') - return $? -} - - -# XXX Ex Usage: txSucceeded $? 
"$TX" "$RECIEVER" -# Desc: Must be called right after the `tx` command, makes sure it got a success response -txSucceeded() { - if (assertTrue "line=${LINENO}, sent tx ($3): $2" $1); then - TX=$2 - assertEquals "line=${LINENO}, good check ($3): $TX" "0" $(echo $TX | jq .check_tx.code) - assertEquals "line=${LINENO}, good deliver ($3): $TX" "0" $(echo $TX | jq .deliver_tx.code) - else - return 1 - fi -} - -# XXX Ex Usage: checkSendTx $HASH $HEIGHT $SENDER $AMOUNT -# Desc: This looks up the tx by hash, and makes sure the height and type match -# and that the first input was from this sender for this amount -checkSendTx() { - TX=$(${CLIENT_EXE} query tx $1) - assertTrue "line=${LINENO}, found tx" $? - if [ -n "$DEBUG" ]; then echo $TX; echo; fi - - assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height) - assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type) - CTX=$(echo $TX | jq .data.data.tx) - assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type) - NTX=$(echo $CTX | jq .data.tx) - assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type) - STX=$(echo $NTX | jq .data.tx) - assertEquals "line=${LINENO}, type=coin/send" '"coin/send"' $(echo $STX | jq .type) - assertEquals "line=${LINENO}, proper sender" "\"$3\"" $(echo $STX | jq .data.inputs[0].address.addr) - assertEquals "line=${LINENO}, proper out amount" "$4" $(echo $STX | jq .data.outputs[0].coins[0].amount) - return $? -} - -# XXX Ex Usage: checkRoleTx $HASH $HEIGHT $NAME $NUM_SIGNERS -# Desc: This looks up the tx by hash, and makes sure the height and type match -# and that the it refers to the proper role -checkRoleTx() { - TX=$(${CLIENT_EXE} query tx $1) - assertTrue "line=${LINENO}, found tx" $? 
- if [ -n "$DEBUG" ]; then echo $TX; echo; fi - - - assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height) - assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type) - CTX=$(echo $TX | jq .data.data.tx) - assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type) - NTX=$(echo $CTX | jq .data.tx) - assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type) - RTX=$(echo $NTX | jq .data.tx) - assertEquals "line=${LINENO}, type=role/create" '"role/create"' $(echo $RTX | jq .type) - assertEquals "line=${LINENO}, proper name" "\"$3\"" $(echo $RTX | jq .data.role) - assertEquals "line=${LINENO}, proper num signers" "$4" $(echo $RTX | jq '.data.signers | length') - return $? -} - - -# XXX Ex Usage: checkSendFeeTx $HASH $HEIGHT $SENDER $AMOUNT $FEE -# Desc: This is like checkSendTx, but asserts a feetx wrapper with $FEE value. -# This looks up the tx by hash, and makes sure the height and type match -# and that the first input was from this sender for this amount -checkSendFeeTx() { - TX=$(${CLIENT_EXE} query tx $1) - assertTrue "line=${LINENO}, found tx" $? 
- if [ -n "$DEBUG" ]; then echo $TX; echo; fi - - assertEquals "line=${LINENO}, proper height" $2 $(echo $TX | jq .height) - assertEquals "line=${LINENO}, type=sigs/one" '"sigs/one"' $(echo $TX | jq .data.type) - CTX=$(echo $TX | jq .data.data.tx) - assertEquals "line=${LINENO}, type=chain/tx" '"chain/tx"' $(echo $CTX | jq .type) - NTX=$(echo $CTX | jq .data.tx) - assertEquals "line=${LINENO}, type=nonce" '"nonce"' $(echo $NTX | jq .type) - FTX=$(echo $NTX | jq .data.tx) - assertEquals "line=${LINENO}, type=fee/tx" '"fee/tx"' $(echo $FTX | jq .type) - assertEquals "line=${LINENO}, proper fee" "$5" $(echo $FTX | jq .data.fee.amount) - STX=$(echo $FTX | jq .data.tx) - assertEquals "line=${LINENO}, type=coin/send" '"coin/send"' $(echo $STX | jq .type) - assertEquals "line=${LINENO}, proper sender" "\"$3\"" $(echo $STX | jq .data.inputs[0].address.addr) - assertEquals "line=${LINENO}, proper out amount" "$4" $(echo $STX | jq .data.outputs[0].coins[0].amount) - return $? -} - - -# XXX Ex Usage: waitForBlock $port -# Desc: Waits until the block height on that node increases by one -waitForBlock() { - addr=http://localhost:$1 - b1=`curl -s $addr/status | jq .result.latest_block_height` - b2=$b1 - while [ "$b2" == "$b1" ]; do - echo "Waiting for node $addr to commit a block ..." - sleep 1 - b2=`curl -s $addr/status | jq .result.latest_block_height` - done -} - - diff --git a/tests/cli/shunit2 b/tests/cli/shunit2 deleted file mode 100644 index f347ca565d..0000000000 --- a/tests/cli/shunit2 +++ /dev/null @@ -1,1067 +0,0 @@ -#! /bin/sh -# $Id$ -# vim:et:ft=sh:sts=2:sw=2 -# -# Copyright 2008 Kate Ward. All Rights Reserved. -# Released under the LGPL (GNU Lesser General Public License) -# -# shUnit2 -- Unit testing framework for Unix shell scripts. -# http://code.google.com/p/shunit2/ -# -# Author: kate.ward@forestent.com (Kate Ward) -# -# shUnit2 is a xUnit based unit test framework for Bourne shell scripts. It is -# based on the popular JUnit unit testing framework for Java. 
- -# return if shunit already loaded -[ -n "${SHUNIT_VERSION:-}" ] && exit 0 -SHUNIT_VERSION='2.1.7pre' - -# return values that scripts can use -SHUNIT_TRUE=0 -SHUNIT_FALSE=1 -SHUNIT_ERROR=2 - -# logging functions -_shunit_warn() { echo "shunit2:WARN $@" >&2; } -_shunit_error() { echo "shunit2:ERROR $@" >&2; } -_shunit_fatal() { echo "shunit2:FATAL $@" >&2; exit ${SHUNIT_ERROR}; } - -# determine some reasonable command defaults -__SHUNIT_UNAME_S=`uname -s` -case "${__SHUNIT_UNAME_S}" in - BSD) __SHUNIT_EXPR_CMD='gexpr' ;; - *) __SHUNIT_EXPR_CMD='expr' ;; -esac - -# commands a user can override if needed -SHUNIT_EXPR_CMD=${SHUNIT_EXPR_CMD:-${__SHUNIT_EXPR_CMD}} - -# enable strict mode by default -SHUNIT_STRICT=${SHUNIT_STRICT:-${SHUNIT_TRUE}} - -# specific shell checks -if [ -n "${ZSH_VERSION:-}" ]; then - setopt |grep "^shwordsplit$" >/dev/null - if [ $? -ne ${SHUNIT_TRUE} ]; then - _shunit_fatal 'zsh shwordsplit option is required for proper operation' - fi - if [ -z "${SHUNIT_PARENT:-}" ]; then - _shunit_fatal "zsh does not pass \$0 through properly. 
please declare \ -\"SHUNIT_PARENT=\$0\" before calling shUnit2" - fi -fi - -# -# constants -# - -__SHUNIT_ASSERT_MSG_PREFIX='ASSERT:' -__SHUNIT_MODE_SOURCED='sourced' -__SHUNIT_MODE_STANDALONE='standalone' -__SHUNIT_PARENT=${SHUNIT_PARENT:-$0} - -# set the constants readonly -__shunit_constants=`set |grep '^__SHUNIT_' |cut -d= -f1` -echo "${__shunit_constants}" |grep '^Binary file' >/dev/null && \ - __shunit_constants=`set |grep -a '^__SHUNIT_' |cut -d= -f1` -for __shunit_const in ${__shunit_constants}; do - if [ -z "${ZSH_VERSION:-}" ]; then - readonly ${__shunit_const} - else - case ${ZSH_VERSION} in - [123].*) readonly ${__shunit_const} ;; - *) readonly -g ${__shunit_const} # declare readonly constants globally - esac - fi -done -unset __shunit_const __shunit_constants - -# -# internal variables -# - -# variables -__shunit_lineno='' # line number of executed test -__shunit_mode=${__SHUNIT_MODE_SOURCED} # operating mode -__shunit_reportGenerated=${SHUNIT_FALSE} # is report generated -__shunit_script='' # filename of unittest script (standalone mode) -__shunit_skip=${SHUNIT_FALSE} # is skipping enabled -__shunit_suite='' # suite of tests to execute - -# counts of tests -__shunit_testSuccess=${SHUNIT_TRUE} -__shunit_testsTotal=0 -__shunit_testsPassed=0 -__shunit_testsFailed=0 - -# counts of asserts -__shunit_assertsTotal=0 -__shunit_assertsPassed=0 -__shunit_assertsFailed=0 -__shunit_assertsSkipped=0 - -# macros -_SHUNIT_LINENO_='eval __shunit_lineno=""; if [ "${1:-}" = "--lineno" ]; then [ -n "$2" ] && __shunit_lineno="[$2] "; shift 2; fi' - -#----------------------------------------------------------------------------- -# private functions - -#----------------------------------------------------------------------------- -# assert functions -# - -# Assert that two values are equal to one another. 
-# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertEquals() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "assertEquals() requires two or three arguments; $# given" - _shunit_error "1: ${1:+$1} 2: ${2:+$2} 3: ${3:+$3}${4:+ 4: $4}" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_expected_=$1 - shunit_actual_=$2 - - shunit_return=${SHUNIT_TRUE} - if [ "${shunit_expected_}" = "${shunit_actual_}" ]; then - _shunit_assertPass - else - failNotEquals "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}" - shunit_return=${SHUNIT_FALSE} - fi - - unset shunit_message_ shunit_expected_ shunit_actual_ - return ${shunit_return} -} -_ASSERT_EQUALS_='eval assertEquals --lineno "${LINENO:-}"' - -# Assert that two values are not equal to one another. 
-# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertNotEquals() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "assertNotEquals() requires two or three arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_expected_=$1 - shunit_actual_=$2 - - shunit_return=${SHUNIT_TRUE} - if [ "${shunit_expected_}" != "${shunit_actual_}" ]; then - _shunit_assertPass - else - failSame "${shunit_message_}" "$@" - shunit_return=${SHUNIT_FALSE} - fi - - unset shunit_message_ shunit_expected_ shunit_actual_ - return ${shunit_return} -} -_ASSERT_NOT_EQUALS_='eval assertNotEquals --lineno "${LINENO:-}"' - -# Assert that a value is null (i.e. an empty string) -# -# Args: -# message: string: failure message [optional] -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertNull() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 1 -o $# -gt 2 ]; then - _shunit_error "assertNull() requires one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 2 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - assertTrue "${shunit_message_}" "[ -z '$1' ]" - shunit_return=$? - - unset shunit_message_ - return ${shunit_return} -} -_ASSERT_NULL_='eval assertNull --lineno "${LINENO:-}"' - -# Assert that a value is not null (i.e. 
a non-empty string) -# -# Args: -# message: string: failure message [optional] -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertNotNull() -{ - ${_SHUNIT_LINENO_} - if [ $# -gt 2 ]; then # allowing 0 arguments as $1 might actually be null - _shunit_error "assertNotNull() requires one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 2 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_actual_=`_shunit_escapeCharactersInString "${1:-}"` - test -n "${shunit_actual_}" - assertTrue "${shunit_message_}" $? - shunit_return=$? - - unset shunit_actual_ shunit_message_ - return ${shunit_return} -} -_ASSERT_NOT_NULL_='eval assertNotNull --lineno "${LINENO:-}"' - -# Assert that two values are the same (i.e. equal to one another). -# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertSame() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "assertSame() requires two or three arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - assertEquals "${shunit_message_}" "$1" "$2" - shunit_return=$? - - unset shunit_message_ - return ${shunit_return} -} -_ASSERT_SAME_='eval assertSame --lineno "${LINENO:-}"' - -# Assert that two values are not the same (i.e. not equal to one another). 
-# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertNotSame() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "assertNotSame() requires two or three arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_:-}$1" - shift - fi - assertNotEquals "${shunit_message_}" "$1" "$2" - shunit_return=$? - - unset shunit_message_ - return ${shunit_return} -} -_ASSERT_NOT_SAME_='eval assertNotSame --lineno "${LINENO:-}"' - -# Assert that a value or shell test condition is true. -# -# In shell, a value of 0 is true and a non-zero value is false. Any integer -# value passed can thereby be tested. -# -# Shell supports much more complicated tests though, and a means to support -# them was needed. As such, this function tests that conditions are true or -# false through evaluation rather than just looking for a true or false. -# -# The following test will succeed: -# assertTrue 0 -# assertTrue "[ 34 -gt 23 ]" -# The following test will fail with a message: -# assertTrue 123 -# assertTrue "test failed" "[ -r '/non/existent/file' ]" -# -# Args: -# message: string: failure message [optional] -# condition: string: integer value or shell conditional statement -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertTrue() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 1 -o $# -gt 2 ]; then - _shunit_error "assertTrue() takes one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 2 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_condition_=$1 - - # see if condition is an integer, i.e. 
a return value - shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` - shunit_return=${SHUNIT_TRUE} - if [ -z "${shunit_condition_}" ]; then - # null condition - shunit_return=${SHUNIT_FALSE} - elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] - then - # possible return value. treating 0 as true, and non-zero as false. - [ ${shunit_condition_} -ne 0 ] && shunit_return=${SHUNIT_FALSE} - else - # (hopefully) a condition - ( eval ${shunit_condition_} ) >/dev/null 2>&1 - [ $? -ne 0 ] && shunit_return=${SHUNIT_FALSE} - fi - - # record the test - if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then - _shunit_assertPass - else - _shunit_assertFail "${shunit_message_}" - fi - - unset shunit_message_ shunit_condition_ shunit_match_ - return ${shunit_return} -} -_ASSERT_TRUE_='eval assertTrue --lineno "${LINENO:-}"' - -# Assert that a value or shell test condition is false. -# -# In shell, a value of 0 is true and a non-zero value is false. Any integer -# value passed can thereby be tested. -# -# Shell supports much more complicated tests though, and a means to support -# them was needed. As such, this function tests that conditions are true or -# false through evaluation rather than just looking for a true or false. 
-# -# The following test will succeed: -# assertFalse 1 -# assertFalse "[ 'apples' = 'oranges' ]" -# The following test will fail with a message: -# assertFalse 0 -# assertFalse "test failed" "[ 1 -eq 1 -a 2 -eq 2 ]" -# -# Args: -# message: string: failure message [optional] -# condition: string: integer value or shell conditional statement -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -assertFalse() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 1 -o $# -gt 2 ]; then - _shunit_error "assertFalse() quires one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 2 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_condition_=$1 - - # see if condition is an integer, i.e. a return value - shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` - shunit_return=${SHUNIT_TRUE} - if [ -z "${shunit_condition_}" ]; then - # null condition - shunit_return=${SHUNIT_FALSE} - elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] - then - # possible return value. treating 0 as true, and non-zero as false. - [ ${shunit_condition_} -eq 0 ] && shunit_return=${SHUNIT_FALSE} - else - # (hopefully) a condition - ( eval ${shunit_condition_} ) >/dev/null 2>&1 - [ $? -eq 0 ] && shunit_return=${SHUNIT_FALSE} - fi - - # record the test - if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then - _shunit_assertPass - else - _shunit_assertFail "${shunit_message_}" - fi - - unset shunit_message_ shunit_condition_ shunit_match_ - return ${shunit_return} -} -_ASSERT_FALSE_='eval assertFalse --lineno "${LINENO:-}"' - -#----------------------------------------------------------------------------- -# failure functions -# - -# Records a test failure. 
-# -# Args: -# message: string: failure message [optional] -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -fail() -{ - ${_SHUNIT_LINENO_} - if [ $# -gt 1 ]; then - _shunit_error "fail() requires zero or one arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 1 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - - _shunit_assertFail "${shunit_message_}" - - unset shunit_message_ - return ${SHUNIT_FALSE} -} -_FAIL_='eval fail --lineno "${LINENO:-}"' - -# Records a test failure, stating two values were not equal. -# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -failNotEquals() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "failNotEquals() requires one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - shunit_expected_=$1 - shunit_actual_=$2 - - _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected:<${shunit_expected_}> but was:<${shunit_actual_}>" - - unset shunit_message_ shunit_expected_ shunit_actual_ - return ${SHUNIT_FALSE} -} -_FAIL_NOT_EQUALS_='eval failNotEquals --lineno "${LINENO:-}"' - -# Records a test failure, stating two values should have been the same. 
-# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -failSame() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "failSame() requires two or three arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - - _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected not same" - - unset shunit_message_ - return ${SHUNIT_FALSE} -} -_FAIL_SAME_='eval failSame --lineno "${LINENO:-}"' - -# Records a test failure, stating two values were not equal. -# -# This is functionally equivalent to calling failNotEquals(). -# -# Args: -# message: string: failure message [optional] -# expected: string: expected value -# actual: string: actual value -# Returns: -# integer: success (TRUE/FALSE/ERROR constant) -failNotSame() -{ - ${_SHUNIT_LINENO_} - if [ $# -lt 2 -o $# -gt 3 ]; then - _shunit_error "failNotEquals() requires one or two arguments; $# given" - return ${SHUNIT_ERROR} - fi - _shunit_shouldSkip && return ${SHUNIT_TRUE} - - shunit_message_=${__shunit_lineno} - if [ $# -eq 3 ]; then - shunit_message_="${shunit_message_}$1" - shift - fi - failNotEquals "${shunit_message_}" "$1" "$2" - shunit_return=$? - - unset shunit_message_ - return ${shunit_return} -} -_FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"' - -#----------------------------------------------------------------------------- -# skipping functions -# - -# Force remaining assert and fail functions to be "skipped". -# -# This function forces the remaining assert and fail functions to be "skipped", -# i.e. they will have no effect. Each function skipped will be recorded so that -# the total of asserts and fails will not be altered. 
-# -# Args: -# None -startSkipping() -{ - __shunit_skip=${SHUNIT_TRUE} -} - -# Resume the normal recording behavior of assert and fail calls. -# -# Args: -# None -endSkipping() -{ - __shunit_skip=${SHUNIT_FALSE} -} - -# Returns the state of assert and fail call skipping. -# -# Args: -# None -# Returns: -# boolean: (TRUE/FALSE constant) -isSkipping() -{ - return ${__shunit_skip} -} - -#----------------------------------------------------------------------------- -# suite functions -# - -# Stub. This function should contains all unit test calls to be made. -# -# DEPRECATED (as of 2.1.0) -# -# This function can be optionally overridden by the user in their test suite. -# -# If this function exists, it will be called when shunit2 is sourced. If it -# does not exist, shunit2 will search the parent script for all functions -# beginning with the word 'test', and they will be added dynamically to the -# test suite. -# -# This function should be overridden by the user in their unit test suite. -# Note: see _shunit_mktempFunc() for actual implementation -# -# Args: -# None -#suite() { :; } # DO NOT UNCOMMENT THIS FUNCTION - -# Adds a function name to the list of tests schedule for execution. -# -# This function should only be called from within the suite() function. -# -# Args: -# function: string: name of a function to add to current unit test suite -suite_addTest() -{ - shunit_func_=${1:-} - - __shunit_suite="${__shunit_suite:+${__shunit_suite} }${shunit_func_}" - __shunit_testsTotal=`expr ${__shunit_testsTotal} + 1` - - unset shunit_func_ -} - -# Stub. This function will be called once before any tests are run. -# -# Common one-time environment preparation tasks shared by all tests can be -# defined here. -# -# This function should be overridden by the user in their unit test suite. -# Note: see _shunit_mktempFunc() for actual implementation -# -# Args: -# None -#oneTimeSetUp() { :; } # DO NOT UNCOMMENT THIS FUNCTION - -# Stub. 
This function will be called once after all tests are finished. -# -# Common one-time environment cleanup tasks shared by all tests can be defined -# here. -# -# This function should be overridden by the user in their unit test suite. -# Note: see _shunit_mktempFunc() for actual implementation -# -# Args: -# None -#oneTimeTearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION - -# Stub. This function will be called before each test is run. -# -# Common environment preparation tasks shared by all tests can be defined here. -# -# This function should be overridden by the user in their unit test suite. -# Note: see _shunit_mktempFunc() for actual implementation -# -# Args: -# None -#setUp() { :; } - -# Note: see _shunit_mktempFunc() for actual implementation -# Stub. This function will be called after each test is run. -# -# Common environment cleanup tasks shared by all tests can be defined here. -# -# This function should be overridden by the user in their unit test suite. -# Note: see _shunit_mktempFunc() for actual implementation -# -# Args: -# None -#tearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION - -#------------------------------------------------------------------------------ -# internal shUnit2 functions -# - -# Create a temporary directory to store various run-time files in. -# -# This function is a cross-platform temporary directory creation tool. Not all -# OSes have the mktemp function, so one is included here. -# -# Args: -# None -# Outputs: -# string: the temporary directory that was created -_shunit_mktempDir() -{ - # try the standard mktemp function - ( exec mktemp -dqt shunit.XXXXXX 2>/dev/null ) && return - - # the standard mktemp didn't work. doing our own. - if [ -r '/dev/urandom' -a -x '/usr/bin/od' ]; then - _shunit_random_=`/usr/bin/od -vAn -N4 -tx4 "${_shunit_file_}" -#! /bin/sh -exit ${SHUNIT_TRUE} -EOF - chmod +x "${_shunit_file_}" - done - - unset _shunit_file_ -} - -# Final cleanup function to leave things as we found them. 
-# -# Besides removing the temporary directory, this function is in charge of the -# final exit code of the unit test. The exit code is based on how the script -# was ended (e.g. normal exit, or via Ctrl-C). -# -# Args: -# name: string: name of the trap called (specified when trap defined) -_shunit_cleanup() -{ - _shunit_name_=$1 - - case ${_shunit_name_} in - EXIT) _shunit_signal_=0 ;; - INT) _shunit_signal_=2 ;; - TERM) _shunit_signal_=15 ;; - *) - _shunit_warn "unrecognized trap value (${_shunit_name_})" - _shunit_signal_=0 - ;; - esac - - # do our work - rm -fr "${__shunit_tmpDir}" - - # exit for all non-EXIT signals - if [ ${_shunit_name_} != 'EXIT' ]; then - _shunit_warn "trapped and now handling the (${_shunit_name_}) signal" - # disable EXIT trap - trap 0 - # add 128 to signal and exit - exit `expr ${_shunit_signal_} + 128` - elif [ ${__shunit_reportGenerated} -eq ${SHUNIT_FALSE} ] ; then - _shunit_assertFail 'Unknown failure encountered running a test' - _shunit_generateReport - exit ${SHUNIT_ERROR} - fi - - unset _shunit_name_ _shunit_signal_ -} - -# The actual running of the tests happens here. -# -# Args: -# None -_shunit_execSuite() -{ - for _shunit_test_ in ${__shunit_suite}; do - __shunit_testSuccess=${SHUNIT_TRUE} - - # disable skipping - endSkipping - - # execute the per-test setup function - setUp - - # execute the test - echo "${_shunit_test_}" - eval ${_shunit_test_} - - # execute the per-test tear-down function - tearDown - - # update stats - if [ ${__shunit_testSuccess} -eq ${SHUNIT_TRUE} ]; then - __shunit_testsPassed=`expr ${__shunit_testsPassed} + 1` - else - __shunit_testsFailed=`expr ${__shunit_testsFailed} + 1` - fi - done - - unset _shunit_test_ -} - -# Generates the user friendly report with appropriate OK/FAILED message. -# -# Args: -# None -# Output: -# string: the report of successful and failed tests, as well as totals. 
-_shunit_generateReport() -{ - _shunit_ok_=${SHUNIT_TRUE} - - # if no exit code was provided one, determine an appropriate one - [ ${__shunit_testsFailed} -gt 0 \ - -o ${__shunit_testSuccess} -eq ${SHUNIT_FALSE} ] \ - && _shunit_ok_=${SHUNIT_FALSE} - - echo - if [ ${__shunit_testsTotal} -eq 1 ]; then - echo "Ran ${__shunit_testsTotal} test." - else - echo "Ran ${__shunit_testsTotal} tests." - fi - - _shunit_failures_='' - _shunit_skipped_='' - [ ${__shunit_assertsFailed} -gt 0 ] \ - && _shunit_failures_="failures=${__shunit_assertsFailed}" - [ ${__shunit_assertsSkipped} -gt 0 ] \ - && _shunit_skipped_="skipped=${__shunit_assertsSkipped}" - - if [ ${_shunit_ok_} -eq ${SHUNIT_TRUE} ]; then - _shunit_msg_='OK' - [ -n "${_shunit_skipped_}" ] \ - && _shunit_msg_="${_shunit_msg_} (${_shunit_skipped_})" - else - _shunit_msg_="FAILED (${_shunit_failures_}" - [ -n "${_shunit_skipped_}" ] \ - && _shunit_msg_="${_shunit_msg_},${_shunit_skipped_}" - _shunit_msg_="${_shunit_msg_})" - fi - - echo - echo ${_shunit_msg_} - __shunit_reportGenerated=${SHUNIT_TRUE} - - unset _shunit_failures_ _shunit_msg_ _shunit_ok_ _shunit_skipped_ -} - -# Test for whether a function should be skipped. -# -# Args: -# None -# Returns: -# boolean: whether the test should be skipped (TRUE/FALSE constant) -_shunit_shouldSkip() -{ - [ ${__shunit_skip} -eq ${SHUNIT_FALSE} ] && return ${SHUNIT_FALSE} - _shunit_assertSkip -} - -# Records a successful test. -# -# Args: -# None -_shunit_assertPass() -{ - __shunit_assertsPassed=`expr ${__shunit_assertsPassed} + 1` - __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` -} - -# Records a test failure. 
-# -# Args: -# message: string: failure message to provide user -_shunit_assertFail() -{ - _shunit_msg_=$1 - - __shunit_testSuccess=${SHUNIT_FALSE} - __shunit_assertsFailed=`expr ${__shunit_assertsFailed} + 1` - __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` - echo "${__SHUNIT_ASSERT_MSG_PREFIX}${_shunit_msg_}" - - unset _shunit_msg_ -} - -# Records a skipped test. -# -# Args: -# None -_shunit_assertSkip() -{ - __shunit_assertsSkipped=`expr ${__shunit_assertsSkipped} + 1` - __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` -} - -# Prepare a script filename for sourcing. -# -# Args: -# script: string: path to a script to source -# Returns: -# string: filename prefixed with ./ (if necessary) -_shunit_prepForSourcing() -{ - _shunit_script_=$1 - case "${_shunit_script_}" in - /*|./*) echo "${_shunit_script_}" ;; - *) echo "./${_shunit_script_}" ;; - esac - unset _shunit_script_ -} - -# Escape a character in a string. -# -# Args: -# c: string: unescaped character -# s: string: to escape character in -# Returns: -# string: with escaped character(s) -_shunit_escapeCharInStr() -{ - [ -n "$2" ] || return # no point in doing work on an empty string - - # Note: using shorter variable names to prevent conflicts with - # _shunit_escapeCharactersInString(). - _shunit_c_=$1 - _shunit_s_=$2 - - - # escape the character - echo ''${_shunit_s_}'' |sed 's/\'${_shunit_c_}'/\\\'${_shunit_c_}'/g' - - unset _shunit_c_ _shunit_s_ -} - -# Escape a character in a string. -# -# Args: -# str: string: to escape characters in -# Returns: -# string: with escaped character(s) -_shunit_escapeCharactersInString() -{ - [ -n "$1" ] || return # no point in doing work on an empty string - - _shunit_str_=$1 - - # Note: using longer variable names to prevent conflicts with - # _shunit_escapeCharInStr(). 
- for _shunit_char_ in '"' '$' "'" '`'; do - _shunit_str_=`_shunit_escapeCharInStr "${_shunit_char_}" "${_shunit_str_}"` - done - - echo "${_shunit_str_}" - unset _shunit_char_ _shunit_str_ -} - -# Extract list of functions to run tests against. -# -# Args: -# script: string: name of script to extract functions from -# Returns: -# string: of function names -_shunit_extractTestFunctions() -{ - _shunit_script_=$1 - - # extract the lines with test function names, strip of anything besides the - # function name, and output everything on a single line. - _shunit_regex_='^[ ]*(function )*test[A-Za-z0-9_]* *\(\)' - egrep "${_shunit_regex_}" "${_shunit_script_}" \ - |sed 's/^[^A-Za-z0-9_]*//;s/^function //;s/\([A-Za-z0-9_]*\).*/\1/g' \ - |xargs - - unset _shunit_regex_ _shunit_script_ -} - -#------------------------------------------------------------------------------ -# main -# - -# determine the operating mode -if [ $# -eq 0 ]; then - __shunit_script=${__SHUNIT_PARENT} - __shunit_mode=${__SHUNIT_MODE_SOURCED} -else - __shunit_script=$1 - [ -r "${__shunit_script}" ] || \ - _shunit_fatal "unable to read from ${__shunit_script}" - __shunit_mode=${__SHUNIT_MODE_STANDALONE} -fi - -# create a temporary storage location -__shunit_tmpDir=`_shunit_mktempDir` - -# provide a public temporary directory for unit test scripts -# TODO(kward): document this -SHUNIT_TMPDIR="${__shunit_tmpDir}/tmp" -mkdir "${SHUNIT_TMPDIR}" - -# setup traps to clean up after ourselves -trap '_shunit_cleanup EXIT' 0 -trap '_shunit_cleanup INT' 2 -trap '_shunit_cleanup TERM' 15 - -# create phantom functions to work around issues with Cygwin -_shunit_mktempFunc -PATH="${__shunit_tmpDir}:${PATH}" - -# make sure phantom functions are executable. this will bite if /tmp (or the -# current $TMPDIR) points to a path on a partition that was mounted with the -# 'noexec' option. the noexec command was created with _shunit_mktempFunc(). 
-noexec 2>/dev/null || _shunit_fatal \ - 'please declare TMPDIR with path on partition with exec permission' - -# we must manually source the tests in standalone mode -if [ "${__shunit_mode}" = "${__SHUNIT_MODE_STANDALONE}" ]; then - . "`_shunit_prepForSourcing \"${__shunit_script}\"`" -fi - -# execute the oneTimeSetUp function (if it exists) -oneTimeSetUp - -# execute the suite function defined in the parent test script -# deprecated as of 2.1.0 -suite - -# if no suite function was defined, dynamically build a list of functions -if [ -z "${__shunit_suite}" ]; then - shunit_funcs_=`_shunit_extractTestFunctions "${__shunit_script}"` - for shunit_func_ in ${shunit_funcs_}; do - suite_addTest ${shunit_func_} - done -fi -unset shunit_func_ shunit_funcs_ - -# execute the tests -_shunit_execSuite - -# execute the oneTimeTearDown function (if it exists) -oneTimeTearDown - -# generate the report -_shunit_generateReport - -# that's it folks -[ ${__shunit_testsFailed} -eq 0 ] -exit $? diff --git a/tests/tendermint/main.go b/tests/tendermint/main.go deleted file mode 100644 index 0f27a2cd1c..0000000000 --- a/tests/tendermint/main.go +++ /dev/null @@ -1,142 +0,0 @@ -package main - -func main() {} - -// import ( -// "fmt" -// "time" - -// "github.com/gorilla/websocket" -// "github.com/cosmos/cosmos-sdk/types" -// wire "github.com/tendermint/go-wire" -// _ "github.com/tendermint/tendermint/rpc/core/types" // Register RPCResponse > Result types -// "github.com/tendermint/tendermint/rpc/lib/client" -// "github.com/tendermint/tendermint/rpc/lib/types" -// cmn "github.com/tendermint/tmlibs/common" -// ) - -// func main() { -// // ws := rpcclient.NewWSClient("127.0.0.1:46657", "/websocket") -// ws := rpcclient.NewWSClient("192.168.99.100:46657", "/websocket") -// chainID := "test_chain_id" - -// _, err := ws.Start() -// if err != nil { -// cmn.Exit(err.Error()) -// } -// var counter = 0 - -// // Read a bunch of responses -// go func() { -// for { -// res, ok := <-ws.ResultsCh -// if 
!ok { -// break -// } -// fmt.Println(counter, "res:", cmn.Blue(string(res))) -// } -// }() - -// // Get the root account -// root := types.PrivAccountFromSecret("test") -// sequence := int(0) -// // Make a bunch of PrivAccounts -// privAccounts := types.RandAccounts(1000, 1000000, 0) -// privAccountSequences := make(map[string]int) - -// // Send coins to each account -// for i := 0; i < len(privAccounts); i++ { -// privAccount := privAccounts[i] -// tx := &types.SendTx{ -// Inputs: []types.TxInput{ -// types.TxInput{ -// Address: root.Account.PubKey.Address(), -// PubKey: root.Account.PubKey, // TODO is this needed? -// Coins: coin.Coins{{"", 1000002}}, -// Sequence: sequence, -// }, -// }, -// Outputs: []types.TxOutput{ -// types.TxOutput{ -// Address: privAccount.Account.PubKey.Address(), -// Coins: coin.Coins{{"", 1000000}}, -// }, -// }, -// } -// sequence += 1 - -// // Sign request -// signBytes := tx.SignBytes(chainID) -// sig := root.Sign(signBytes) -// tx.Inputs[0].Signature = sig -// //fmt.Println("tx:", tx) - -// // Write request -// txBytes := wire.BinaryBytes(struct{ types.Tx }{tx}) -// request, err := rpctypes.MapToRequest("fakeid", "broadcast_tx_sync", map[string]interface{}{"tx": txBytes}) -// if err != nil { -// cmn.Exit("cannot encode request: " + err.Error()) -// } -// reqBytes := wire.JSONBytes(request) -// //fmt.Print(".") -// err = ws.WriteMessage(websocket.TextMessage, reqBytes) -// if err != nil { -// cmn.Exit("writing websocket request: " + err.Error()) -// } -// } - -// // Now send coins between these accounts -// for { -// counter += 1 -// time.Sleep(time.Millisecond * 10) - -// randA := cmn.RandInt() % len(privAccounts) -// randB := cmn.RandInt() % len(privAccounts) -// if randA == randB { -// continue -// } - -// privAccountA := privAccounts[randA] -// privAccountASequence := privAccountSequences[privAccountA.Account.PubKey.KeyString()] -// privAccountSequences[privAccountA.Account.PubKey.KeyString()] = privAccountASequence + 1 -// 
privAccountB := privAccounts[randB] - -// tx := &types.SendTx{ -// Inputs: []types.TxInput{ -// types.TxInput{ -// Address: privAccountA.Account.PubKey.Address(), -// PubKey: privAccountA.Account.PubKey, -// Coins: coin.Coins{{"", 3}}, -// Sequence: privAccountASequence + 1, -// }, -// }, -// Outputs: []types.TxOutput{ -// types.TxOutput{ -// Address: privAccountB.Account.PubKey.Address(), -// Coins: coin.Coins{{"", 1}}, -// }, -// }, -// } - -// // Sign request -// signBytes := tx.SignBytes(chainID) -// sig := privAccountA.Sign(signBytes) -// tx.Inputs[0].Signature = sig -// //fmt.Println("tx:", tx) - -// // Write request -// txBytes := wire.BinaryBytes(struct{ types.Tx }{tx}) -// request, err := rpctypes.MapToRequest("fakeid", "broadcast_tx_sync", map[string]interface{}{"tx": txBytes}) -// if err != nil { -// cmn.Exit("cannot encode request: " + err.Error()) -// } -// reqBytes := wire.JSONBytes(request) -// //fmt.Print(".") -// err = ws.WriteMessage(websocket.TextMessage, reqBytes) -// if err != nil { -// cmn.Exit("writing websocket request: " + err.Error()) -// } -// } - -// ws.Stop() -// } diff --git a/test_cover.sh b/tests/test_cover.sh similarity index 100% rename from test_cover.sh rename to tests/test_cover.sh