fix lint js

Author: 0xmuralik
Date: 2023-03-14 11:18:42 +05:30
Parent: db034c2e43
Commit: 3e22bc9316
44 changed files with 56931 additions and 55014 deletions

.github/workflows/linter.yml (new file, +14 lines)

@@ -0,0 +1,14 @@
linter:
# By default, Super-Linter will run all linters
# You can disable specific linters here by removing them from the list
# You can add additional linters that Super-Linter does not support by opening a pull request
# See https://github.com/github/super-linter#supported-linters
# An example of adding a custom linter is available at https://github.com/github/super-linter/tree/main/custom_linters
#
# Disabled linters
- gitleaks # Remove this line to enable gitleaks
# Enabled linters
# - ansible-lint
# - arm-ttk
# - ...
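For context, Super-Linter normally runs as a GitHub Actions job, and the same image can also be pointed at a local checkout while iterating on lint fixes like the ones in this commit. A minimal sketch of a local run, assuming the stock github/super-linter image and its documented RUN_LOCAL mode (none of this is taken from the commit itself):

# Lint the current checkout with Super-Linter (sketch; image tag and env vars are assumptions).
# RUN_LOCAL=true makes it lint the mounted directory instead of a GitHub event payload.
docker run --rm \
  -e RUN_LOCAL=true \
  -e VALIDATE_ALL_CODEBASE=false \
  -v "$(pwd)":/tmp/lint \
  github/super-linter:v4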


@@ -7,9 +7,9 @@ ENV PACKAGES git build-base
 WORKDIR /go/src/github.com/cerc-io/laconicd
 # Install dependencies
-RUN apk add --no-cache $PACKAGES
-RUN apk add linux-headers
+RUN apk add --no-cache $PACKAGES=~
+RUN apk add --no-cache linux-headers=~
 # Add source files
 COPY . .

@@ -21,7 +21,7 @@ RUN make build
 FROM alpine:3.17.1
 # Install ca-certificates
-RUN apk add --no-cache ca-certificates jq curl
+RUN apk add --no-cache ca-certificates=~ jq=~ curl=~
 WORKDIR /
 # Copy over binaries from the build-env
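The `=~` suffixes and the added `--no-cache` flag in the Dockerfile hunk above look like responses to hadolint's DL3018 (pin versions in `apk add`) and DL3019 (use `--no-cache`) rules; `pkg=~prefix` appears to be apk's fuzzy version constraint. A hedged sketch of the syntax, with illustrative version numbers that are not taken from this commit:

# Exact pin, fuzzy prefix pin, and an unpinned package for comparison (versions illustrative).
apk add --no-cache 'jq=1.6-r2' 'curl=~8' ca-certificates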

File diff suppressed because it is too large

File diff suppressed because it is too large

init.sh (130 lines changed)

@ -28,69 +28,69 @@ laconicd keys add $KEY --keyring-backend $KEYRING --algo $KEYALGO
laconicd init $MONIKER --chain-id $CHAINID laconicd init $MONIKER --chain-id $CHAINID
# Change parameter token denominations to aphoton # Change parameter token denominations to aphoton
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["crisis"]["constant_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["crisis"]["constant_fee"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["gov"]["deposit_params"]["min_deposit"][0]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["gov"]["deposit_params"]["min_deposit"][0]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["mint"]["params"]["mint_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["mint"]["params"]["mint_denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
# Custom modules # Custom modules
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["record_rent"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_rent"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commit_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_commit_fee"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveal_fee"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_reveal_fee"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_minimum_bid"]["denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_minimum_bid"]["denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
if [[ "$TEST_REGISTRY_EXPIRY" == "true" ]]; then if [[ "$TEST_REGISTRY_EXPIRY" == "true" ]]; then
echo "Setting timers for expiry tests." echo "Setting timers for expiry tests."
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["record_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["record_rent_duration"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_grace_period"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
fi fi
if [[ "$TEST_AUCTION_ENABLED" == "true" ]]; then if [[ "$TEST_AUCTION_ENABLED" == "true" ]]; then
echo "Enabling auction and setting timers." echo "Enabling auction and setting timers."
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_enabled"]=true' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_enabled"]=true' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_rent_duration"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_grace_period"]="300s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_grace_period"]="300s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_commits_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_commits_duration"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["registry"]["params"]["authority_auction_reveals_duration"]="60s"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.app_state["registry"]["params"]["authority_auction_reveals_duration"]="60s"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
fi fi
# increase block time (?) # increase block time (?)
cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["time_iota_ms"]="1000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.consensus_params["block"]["time_iota_ms"]="1000"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
# Set gas limit in genesis # Set gas limit in genesis
cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["max_gas"]="10000000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.consensus_params["block"]["max_gas"]="10000000"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
# disable produce empty block # disable produce empty block
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.laconicd/config/config.toml
else else
sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.laconicd/config/config.toml
fi fi
if [[ $1 == "pending" ]]; then if [[ $1 == "pending" ]]; then
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$HOME"/.laconicd/config/config.toml
else else
sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.laconicd/config/config.toml sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$HOME"/.laconicd/config/config.toml
fi fi
fi fi
@ -108,39 +108,39 @@ laconicd validate-genesis
# disable produce empty block and enable prometheus metrics # disable produce empty block and enable prometheus metrics
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/prometheus = false/prometheus = true/' $HOME/.ethermintd/config/config.toml sed -i '' 's/prometheus = false/prometheus = true/' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/prometheus-retention-time = 0/prometheus-retention-time = 1000000000000/g' $HOME/.ethermintd/config/app.toml sed -i '' 's/prometheus-retention-time = 0/prometheus-retention-time = 1000000000000/g' "$HOME"/.ethermintd/config/app.toml
sed -i '' 's/enabled = false/enabled = true/g' $HOME/.ethermintd/config/app.toml sed -i '' 's/enabled = false/enabled = true/g' "$HOME"/.ethermintd/config/app.toml
else else
sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.ethermintd/config/config.toml sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/prometheus = false/prometheus = true/' $HOME/.ethermintd/config/config.toml sed -i 's/prometheus = false/prometheus = true/' "$HOME"/.ethermintd/config/config.toml
sed -i 's/prometheus-retention-time = "0"/prometheus-retention-time = "1000000000000"/g' $HOME/.ethermintd/config/app.toml sed -i 's/prometheus-retention-time = "0"/prometheus-retention-time = "1000000000000"/g' "$HOME"/.ethermintd/config/app.toml
sed -i 's/enabled = false/enabled = true/g' $HOME/.ethermintd/config/app.toml sed -i 's/enabled = false/enabled = true/g' "$HOME"/.ethermintd/config/app.toml
fi fi
if [[ $1 == "pending" ]]; then if [[ $1 == "pending" ]]; then
echo "pending mode is on, please wait for the first block committed." echo "pending mode is on, please wait for the first block committed."
if [[ $OSTYPE == "darwin"* ]]; then if [[ $OSTYPE == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.ethermintd/config/config.toml sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$HOME"/.ethermintd/config/config.toml
else else
sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_prevote = "1s"/timeout_prevote = "10s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "5s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$HOME"/.ethermintd/config/config.toml
sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $HOME/.ethermintd/config/config.toml sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$HOME"/.ethermintd/config/config.toml
fi fi
fi fi
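The recurring edit in init.sh above replaces each `cat file | jq ...` pipeline with a direct input redirection and double-quotes every `$HOME` expansion, which is the usual way to clear ShellCheck's SC2002 ("useless use of cat") and SC2086 (unquoted variable) findings. Condensed to one representative line from the hunk:

# Before: flagged by ShellCheck (SC2002, SC2086)
cat $HOME/.laconicd/config/genesis.json | jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json
# After: redirect the file into jq and quote the expansions
< "$HOME"/.laconicd/config/genesis.json jq '.app_state["staking"]["params"]["bond_denom"]="aphoton"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json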


@@ -36,7 +36,6 @@ usage() {
 echo "-s <number> -- Sleep between operations in secs. eg: 5"
 echo "-m <string> -- Mode for testing. eg: rpc"
 echo "-r <string> -- Remove test dir after, eg: true, default is false"
-exit 1
 }
 while getopts "h?t:q:z:s:m:r:" args; do
@ -74,7 +73,7 @@ init_func() {
"$PWD"/build/laconicd keys add $KEY"$i" --keyring-backend test --home "$DATA_DIR$i" --no-backup --algo "eth_secp256k1" "$PWD"/build/laconicd keys add $KEY"$i" --keyring-backend test --home "$DATA_DIR$i" --no-backup --algo "eth_secp256k1"
"$PWD"/build/laconicd init $MONIKER --chain-id $CHAINID --home "$DATA_DIR$i" "$PWD"/build/laconicd init $MONIKER --chain-id $CHAINID --home "$DATA_DIR$i"
# Set gas limit in genesis # Set gas limit in genesis
cat $DATA_DIR$i/config/genesis.json | jq '.consensus_params["block"]["max_gas"]="10000000"' > $DATA_DIR$i/config/tmp_genesis.json && mv $DATA_DIR$i/config/tmp_genesis.json $DATA_DIR$i/config/genesis.json < "$DATA_DIR""$i"/config/genesis.json jq '.consensus_params["block"]["max_gas"]="10000000"' > "$DATA_DIR""$i"/config/tmp_genesis.json && mv "$DATA_DIR""$i"/config/tmp_genesis.json "$DATA_DIR""$i"/config/genesis.json
"$PWD"/build/laconicd add-genesis-account \ "$PWD"/build/laconicd add-genesis-account \
"$("$PWD"/build/laconicd keys show "$KEY$i" --keyring-backend test -a --home "$DATA_DIR$i")" 1000000000000000000aphoton,1000000000000000000stake \ "$("$PWD"/build/laconicd keys show "$KEY$i" --keyring-backend test -a --home "$DATA_DIR$i")" 1000000000000000000aphoton,1000000000000000000stake \
--keyring-backend test --home "$DATA_DIR$i" --keyring-backend test --home "$DATA_DIR$i"
@ -83,27 +82,27 @@ init_func() {
"$PWD"/build/laconicd validate-genesis --home "$DATA_DIR$i" "$PWD"/build/laconicd validate-genesis --home "$DATA_DIR$i"
if [[ $MODE == "pending" ]]; then if [[ $MODE == "pending" ]]; then
ls $DATA_DIR$i ls "$DATA_DIR""$i"
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "120s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_prevote = "1s"/timeout_prevote = "120s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $DATA_DIR$i/config/config.toml sed -i '' 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$DATA_DIR""$i"/config/config.toml
else else
sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' $DATA_DIR$i/config/config.toml sed -i 's/create_empty_blocks_interval = "0s"/create_empty_blocks_interval = "30s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_propose = "3s"/timeout_propose = "30s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_propose_delta = "500ms"/timeout_propose_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_prevote = "1s"/timeout_prevote = "120s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_prevote = "1s"/timeout_prevote = "120s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_prevote_delta = "500ms"/timeout_prevote_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_precommit = "1s"/timeout_precommit = "10s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "2s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_precommit_delta = "500ms"/timeout_precommit_delta = "2s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_commit = "5s"/timeout_commit = "150s"/g' "$DATA_DIR""$i"/config/config.toml
sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' $DATA_DIR$i/config/config.toml sed -i 's/timeout_broadcast_tx_commit = "10s"/timeout_broadcast_tx_commit = "150s"/g' "$DATA_DIR""$i"/config/config.toml
fi fi
fi fi
} }
@@ -169,9 +168,9 @@ stop_func() {
 kill -9 "$LACONICD_PID"
 wait "$LACONICD_PID"
-if [ $REMOVE_DATA_DIR == "true" ]
+if [ "$REMOVE_DATA_DIR" == "true" ]
 then
-rm -rf $DATA_DIR*
+rm -rf "$DATA_DIR"*
 fi
 }

@@ -180,7 +179,7 @@ for i in "${arr[@]}"; do
 done
 if [[ (-z $TEST || $TEST == "rpc") && $TEST_FAIL -ne 0 ]]; then
-exit $TEST_FAIL
+exit "$TEST_FAIL"
 else
 exit 0
 fi
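The same quoting rule drives the changes in the test scripts above: `exit $TEST_FAIL`, `[ $REMOVE_DATA_DIR == "true" ]` and `rm -rf $DATA_DIR*` all get their expansions quoted. A small sketch (not from the repo) of why SC2086 matters for the `[ ... ]` case:

# With an empty value, the unquoted test collapses and [ reports a syntax error;
# the quoted form evaluates cleanly to false.
REMOVE_DATA_DIR=""
[ $REMOVE_DATA_DIR == "true" ]     # becomes: [ == "true" ]   -> "unary operator expected"
[ "$REMOVE_DATA_DIR" == "true" ]   # becomes: [ "" == "true" ] -> false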


@@ -3,7 +3,7 @@
 # "stable" mode tests assume data is static
 # "live" mode tests assume data dynamic
-SCRIPT=$(basename ${BASH_SOURCE[0]})
+SCRIPT=$(basename "${BASH_SOURCE[0]}")
 TEST=""
 QTD=1
 SLEEP_TIMEOUT=5

@@ -30,7 +30,6 @@ usage() {
 echo "-q <number> -- Quantity of nodes to run. eg: 3"
 echo "-z <number> -- Quantity of nodes to run tests against eg: 3"
 echo "-s <number> -- Sleep between operations in secs. eg: 5"
-exit 1
 }
 while getopts "h?t:q:z:s:" args; do

@@ -151,7 +150,7 @@ for i in "${arr[@]}"; do
 done
 if [[ (-z $TEST || $TEST == "rpc") && $TEST_FAIL -ne 0 ]]; then
-exit $TEST_FAIL
+exit "$TEST_FAIL"
 else
 exit 0
 fi


@ -27,30 +27,30 @@ USER4_MNEMONIC="doll midnight silk carpet brush boring pluck office gown inquiry
rm -rf ~/.laconic* rm -rf ~/.laconic*
# Import keys from mnemonics # Import keys from mnemonics
echo $VAL_MNEMONIC | laconicd keys add $VAL_KEY --recover --keyring-backend test --algo "eth_secp256k1" echo "$VAL_MNEMONIC" | laconicd keys add $VAL_KEY --recover --keyring-backend test --algo "eth_secp256k1"
echo $USER1_MNEMONIC | laconicd keys add $USER1_KEY --recover --keyring-backend test --algo "eth_secp256k1" echo "$USER1_MNEMONIC" | laconicd keys add $USER1_KEY --recover --keyring-backend test --algo "eth_secp256k1"
echo $USER2_MNEMONIC | laconicd keys add $USER2_KEY --recover --keyring-backend test --algo "eth_secp256k1" echo "$USER2_MNEMONIC" | laconicd keys add $USER2_KEY --recover --keyring-backend test --algo "eth_secp256k1"
echo $USER3_MNEMONIC | laconicd keys add $USER3_KEY --recover --keyring-backend test --algo "eth_secp256k1" echo "$USER3_MNEMONIC" | laconicd keys add $USER3_KEY --recover --keyring-backend test --algo "eth_secp256k1"
echo $USER4_MNEMONIC | laconicd keys add $USER4_KEY --recover --keyring-backend test --algo "eth_secp256k1" echo "$USER4_MNEMONIC" | laconicd keys add $USER4_KEY --recover --keyring-backend test --algo "eth_secp256k1"
laconicd init $MONIKER --chain-id $CHAINID laconicd init $MONIKER --chain-id $CHAINID
# Set gas limit in genesis # Set gas limit in genesis
cat $HOME/.laconicd/config/genesis.json | jq '.consensus_params["block"]["max_gas"]="10000000"' > $HOME/.laconicd/config/tmp_genesis.json && mv $HOME/.laconicd/config/tmp_genesis.json $HOME/.laconicd/config/genesis.json < "$HOME"/.laconicd/config/genesis.json jq '.consensus_params["block"]["max_gas"]="10000000"' > "$HOME"/.laconicd/config/tmp_genesis.json && mv "$HOME"/.laconicd/config/tmp_genesis.json "$HOME"/.laconicd/config/genesis.json
# modified default configs # modified default configs
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml sed -i '' 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/prometheus-retention-time = 0/prometheus-retention-time = 1000000000000/g' $HOME/.laconicd/config/app.toml sed -i '' 's/prometheus-retention-time = 0/prometheus-retention-time = 1000000000000/g' "$HOME"/.laconicd/config/app.toml
sed -i '' 's/enabled = false/enabled = true/g' $HOME/.laconicd/config/app.toml sed -i '' 's/enabled = false/enabled = true/g' "$HOME"/.laconicd/config/app.toml
sed -i '' 's/prometheus = false/prometheus = true/' $HOME/.laconicd/config/config.toml sed -i '' 's/prometheus = false/prometheus = true/' "$HOME"/.laconicd/config/config.toml
sed -i '' 's/timeout_commit = "5s"/timeout_commit = "1s"/g' $HOME/.laconicd/config/config.toml sed -i '' 's/timeout_commit = "5s"/timeout_commit = "1s"/g' "$HOME"/.laconicd/config/config.toml
else else
sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' $HOME/.laconicd/config/config.toml sed -i 's/create_empty_blocks = true/create_empty_blocks = false/g' "$HOME"/.laconicd/config/config.toml
sed -i 's/prometheus-retention-time = "0"/prometheus-retention-time = "1000000000000"/g' $HOME/.laconicd/config/app.toml sed -i 's/prometheus-retention-time = "0"/prometheus-retention-time = "1000000000000"/g' "$HOME"/.laconicd/config/app.toml
sed -i 's/enabled = false/enabled = true/g' $HOME/.laconicd/config/app.toml sed -i 's/enabled = false/enabled = true/g' "$HOME"/.laconicd/config/app.toml
sed -i 's/prometheus = false/prometheus = true/' $HOME/.laconicd/config/config.toml sed -i 's/prometheus = false/prometheus = true/' "$HOME"/.laconicd/config/config.toml
sed -i 's/timeout_commit = "5s"/timeout_commit = "1s"/g' $HOME/.laconicd/config/config.toml sed -i 's/timeout_commit = "5s"/timeout_commit = "1s"/g' "$HOME"/.laconicd/config/config.toml
fi fi
# Allocate genesis accounts (cosmos formatted addresses) # Allocate genesis accounts (cosmos formatted addresses)


@@ -1,5 +1,5 @@
-const Migrations = artifacts.require("Migrations");
+const Migrations = artifacts.require('Migrations')
 module.exports = function (deployer) {
-deployer.deploy(Migrations);
-};
+deployer.deploy(Migrations)
+}


@@ -1,22 +1,22 @@
-const Counter = artifacts.require("Counter")
-const truffleAssert = require('truffle-assertions');
-async function expectRevert(promise) {
+const Counter = artifacts.require('Counter')
+const truffleAssert = require('truffle-assertions')
+async function expectRevert (promise) {
 try {
-await promise;
+await promise
 } catch (error) {
 if (error.message.indexOf('revert') === -1) {
-expect('revert').to.equal(error.message, 'Wrong kind of exception received');
+expect('revert').to.equal(error.message, 'Wrong kind of exception received')
 }
-return;
+return
 }
-expect.fail('Expected an exception but none was received');
+expect.fail('Expected an exception but none was received')
 }
 contract('Counter', (accounts) => {
-console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`);
-console.log('==========================\n');
-const [one, two, three] = accounts;
+console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`)
+console.log('==========================\n')
+const [one, two, three] = accounts
 let counter
 beforeEach(async () => {

@@ -76,14 +76,13 @@ contract('Counter', (accounts) => {
 // Check lifecycle of events
 const contract = new web3.eth.Contract(counter.abi, counter.address)
-const allEvents = await contract.getPastEvents("allEvents", { fromBlock: 1, toBlock: 'latest' })
-const changedEvents = await contract.getPastEvents("Changed", { fromBlock: 1, toBlock: 'latest' })
+const allEvents = await contract.getPastEvents('allEvents', { fromBlock: 1, toBlock: 'latest' })
+const changedEvents = await contract.getPastEvents('Changed', { fromBlock: 1, toBlock: 'latest' })
 console.log('allEvents', allEvents)
 console.log('changedEvents', changedEvents)
 assert.equal(allEvents.length, 3)
 assert.equal(changedEvents.length, 2)
-await expectRevert(counter.subtract());
+await expectRevert(counter.subtract())
 })
 })


@@ -2,16 +2,16 @@ module.exports = {
 networks: {
 // Development network is just left as truffle's default settings
 ethermint: {
-host: "127.0.0.1", // Localhost (default: none)
+host: '127.0.0.1', // Localhost (default: none)
 port: 8545, // Standard Ethereum port (default: none)
-network_id: "*", // Any network (default: none)
+network_id: '*', // Any network (default: none)
 gas: 5000000, // Gas sent with each transaction
-gasPrice: 1000000000, // 1 gwei (in wei)
-},
+gasPrice: 1000000000 // 1 gwei (in wei)
+}
 },
 compilers: {
 solc: {
-version: "0.5.17",
-},
-},
+version: '0.5.17'
+}
+}
 }
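The JavaScript edits in the test and config files above (and in the files that follow) all converge on single quotes, no semicolons, no trailing commas, and a space before function parentheses, which matches the StandardJS style that Super-Linter can enforce for JavaScript. Assuming `standard` is the linter in play here, most of these rewrites can be produced mechanically; the glob below is illustrative, not the commit's actual file list:

# Auto-apply StandardJS formatting to the Truffle test suites (sketch).
npx standard --fix 'tests/solidity/**/*.js'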


@@ -1,20 +1,18 @@
-contract('Transaction', async function(accounts) {
-
-it('should send a transaction with EIP-1559 flag', async function() {
-console.log(`Accounts: `, accounts);
-console.log(web3.version);
+contract('Transaction', async function (accounts) {
+it('should send a transaction with EIP-1559 flag', async function () {
+console.log('Accounts: ', accounts)
+console.log(web3.version)
 const tx = await web3.eth.sendTransaction({
 from: accounts[0],
-to: !!accounts[1] ? accounts[1] : "0x0000000000000000000000000000000000000000",
+to: accounts[1] ? accounts[1] : '0x0000000000000000000000000000000000000000',
 value: '10000000',
 gas: '21000',
-type: "0x2",
+type: '0x2',
 common: {
 hardfork: 'london'
 }
-});
-console.log(tx);
+})
+console.log(tx)
 // assert.equal(tx.type, '0x2', 'Tx type should be 0x2');
-});
-
-});
+})
+})


@ -4,16 +4,16 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 5000000, // Gas sent with each transaction gas: 5000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.5.17", version: '0.5.17'
}, }
}, }
} }


@@ -1,5 +1,5 @@
-const Migrations = artifacts.require("Migrations");
+const Migrations = artifacts.require('Migrations')
 module.exports = function (deployer) {
-deployer.deploy(Migrations);
-};
+deployer.deploy(Migrations)
+}


@@ -1,16 +1,16 @@
-const TestRevert = artifacts.require("TestRevert")
-const truffleAssert = require('truffle-assertions');
-async function expectRevert(promise) {
+const TestRevert = artifacts.require('TestRevert')
+const truffleAssert = require('truffle-assertions')
+async function expectRevert (promise) {
 try {
-await promise;
+await promise
 } catch (error) {
 if (error.message.indexOf('revert') === -1) {
-expect('revert').to.equal(error.message, 'Wrong kind of exception received');
+expect('revert').to.equal(error.message, 'Wrong kind of exception received')
 }
-return;
+return
 }
-expect.fail('Expected an exception but none was received');
+expect.fail('Expected an exception but none was received')
 }
 contract('TestRevert', (accounts) => {


@ -2,16 +2,16 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 5000000, // Gas sent with each transaction gas: 5000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.8.6", version: '0.8.6'
}, }
}, }
} }


@@ -1,6 +1,6 @@
 const { usePlugin } = require('@nomiclabs/buidler/config')
-usePlugin("@nomiclabs/buidler-ganache")
+usePlugin('@nomiclabs/buidler-ganache')
 usePlugin('@nomiclabs/buidler-truffle5')
 module.exports = {

@@ -15,14 +15,14 @@ module.exports = {
 ethermint: {
 url: 'http://localhost:8545',
 gasLimit: 5000000, // Gas sent with each transaction
-gasPrice: 1000000000, // 1 gwei (in wei)
-},
+gasPrice: 1000000000 // 1 gwei (in wei)
+}
 },
 solc: {
 version: '0.4.24',
 optimizer: {
 enabled: true,
-runs: 10000,
-},
-},
+runs: 10000
+}
+}
 }


@@ -4,7 +4,7 @@ const { assertRevert } = require('@aragon/contract-helpers-test/src/asserts')
 const LifecycleMock = artifacts.require('LifecycleMock')
 const ERRORS = {
-INIT_ALREADY_INITIALIZED: 'INIT_ALREADY_INITIALIZED',
+INIT_ALREADY_INITIALIZED: 'INIT_ALREADY_INITIALIZED'
 }
 contract('Lifecycle', () => {

@@ -36,11 +36,11 @@ contract('Lifecycle', () => {
 })
 it('cannot be re-initialized', async () => {
-await assertRevert(lifecycle.initializeMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/)
+await assertRevert(lifecycle.initializeMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
 })
 it('cannot be petrified', async () => {
-await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/)
+await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
 })
 })

@@ -58,7 +58,7 @@ contract('Lifecycle', () => {
 })
 it('cannot be petrified again', async () => {
-await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/)
+await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
 })
 it('has initialization block in the future', async () => {


@@ -1,5 +1,5 @@
-const Migrations = artifacts.require("Migrations");
+const Migrations = artifacts.require('Migrations')
 module.exports = function (deployer) {
-deployer.deploy(Migrations);
-};
+deployer.deploy(Migrations)
+}


@ -4,7 +4,7 @@ const { assertRevert } = require('@aragon/contract-helpers-test/src/asserts')
const LifecycleMock = artifacts.require('LifecycleMock') const LifecycleMock = artifacts.require('LifecycleMock')
const ERRORS = { const ERRORS = {
INIT_ALREADY_INITIALIZED: 'INIT_ALREADY_INITIALIZED', INIT_ALREADY_INITIALIZED: 'INIT_ALREADY_INITIALIZED'
} }
contract('Lifecycle', () => { contract('Lifecycle', () => {
@ -36,11 +36,11 @@ contract('Lifecycle', () => {
}) })
it('cannot be re-initialized', async () => { it('cannot be re-initialized', async () => {
await assertRevert(lifecycle.initializeMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/) await assertRevert(lifecycle.initializeMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
}) })
it('cannot be petrified', async () => { it('cannot be petrified', async () => {
await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/) await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
}) })
}) })
@ -58,7 +58,7 @@ contract('Lifecycle', () => {
}) })
it('cannot be petrified again', async () => { it('cannot be petrified again', async () => {
await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED*/) await assertRevert(lifecycle.petrifyMock()/*, ERRORS.INIT_ALREADY_INITIALIZED */)
}) })
it('has initialization block in the future', async () => { it('has initialization block in the future', async () => {


@ -2,22 +2,22 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 5000000, // Gas sent with each transaction gas: 5000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.4.24", version: '0.4.24',
settings: { settings: {
optimizer: { optimizer: {
enabled: true, enabled: true,
runs: 10000, runs: 10000
}, }
}, }
}, }
}, }
} }


@@ -1,5 +1,5 @@
-const Migrations = artifacts.require("Migrations");
-module.exports = function(deployer) {
-deployer.deploy(Migrations);
-};
+const Migrations = artifacts.require('Migrations')
+module.exports = function (deployer) {
+deployer.deploy(Migrations)
+}


@@ -1,5 +1,5 @@
-var OpCodes = artifacts.require("./OpCodes.sol");
-module.exports = function(deployer) {
-deployer.deploy(OpCodes);
-};
+const OpCodes = artifacts.require('./OpCodes.sol')
+module.exports = function (deployer) {
+deployer.deploy(OpCodes)
+}


@@ -1,8 +1,8 @@
 const TodoList = artifacts.require('./OpCodes.sol')
 const assert = require('assert')
 let contractInstance
-const Web3 = require('web3');
-const web3 = new Web3(new Web3.providers.HttpProvider('http://localhost:8545'));
+const Web3 = require('web3')
+const web3 = new Web3(new Web3.providers.HttpProvider('http://localhost:8545'))
 // const web3 = new Web3(new Web3.providers.HttpProvider('http://localhost:9545'));
 contract('OpCodes', (accounts) => {

@@ -16,19 +16,18 @@ contract('OpCodes', (accounts) => {
 // })
 it('Should throw invalid op code', async () => {
-try{
+try {
 await contractInstance.test_invalid()
-}
-catch(error) {
-console.error(error);
+} catch (error) {
+console.error(error)
 }
 })
 it('Should revert', async () => {
-try{
-await contractInstance.test_revert() }
-catch(error) {
-console.error(error);
+try {
+await contractInstance.test_revert()
+} catch (error) {
+console.error(error)
 }
 })
 })


@@ -2,17 +2,16 @@ module.exports = {
 networks: {
 // Development network is just left as truffle's default settings
 ethermint: {
-host: "127.0.0.1", // Localhost (default: none)
+host: '127.0.0.1', // Localhost (default: none)
 port: 8545, // Standard Ethereum port (default: none)
-network_id: "*", // Any network (default: none)
+network_id: '*', // Any network (default: none)
 gas: 5000000, // Gas sent with each transaction
-gasPrice: 1000000000, // 1 gwei (in wei)
-},
+gasPrice: 1000000000 // 1 gwei (in wei)
+}
 },
 compilers: {
 solc: {
-version: "0.5.17",
-},
-},
+version: '0.5.17'
+}
+}
 }


@@ -1,5 +1,5 @@
-const Migrations = artifacts.require("Migrations");
+const Migrations = artifacts.require('Migrations')
 module.exports = function (deployer) {
-deployer.deploy(Migrations);
-};
+deployer.deploy(Migrations)
+}


@@ -13,26 +13,26 @@ const PROXY_FORWARD_GAS = TX_BASE_GAS + 2e6 // high gas amount to ensure that th
 const FALLBACK_SETUP_GAS = 100 // rough estimation of how much gas it spends before executing the fallback code
 const SOLIDITY_TRANSFER_GAS = 2300
-async function assertOutOfGas(blockOrPromise) {
+async function assertOutOfGas (blockOrPromise) {
 try {
 typeof blockOrPromise === 'function'
 ? await blockOrPromise()
-: await blockOrPromise;
+: await blockOrPromise
 } catch (error) {
 const errorMatchesExpected =
 error.message.search('out of gas') !== -1 ||
-error.message.search('consuming all gas') !== -1;
+error.message.search('consuming all gas') !== -1
 assert(
 errorMatchesExpected,
 `Expected error code "out of gas" or "consuming all gas" but failed with "${error}" instead.`
-);
-return error;
+)
+return error
 }
-assert(false, `Expected "out of gas" or "consuming all gas" but it did not fail`);
+assert(false, 'Expected "out of gas" or "consuming all gas" but it did not fail')
 }
-contract('DepositableDelegateProxy', ([ sender ]) => {
+contract('DepositableDelegateProxy', ([sender]) => {
 let ethSender, proxy, target, proxyTargetWithoutFallbackBase, proxyTargetWithFallbackBase
 // Initial setup


@ -2,22 +2,22 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 5000000, // Gas sent with each transaction gas: 5000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.4.24", version: '0.4.24',
settings: { settings: {
optimizer: { optimizer: {
enabled: true, enabled: true,
runs: 10000, runs: 10000
}, }
}, }
}, }
}, }
} }


@@ -29,24 +29,24 @@ contract('Staking app, Approve and call fallback', ([owner, user]) => {
 const finalUserBalance = await token.balanceOf(user)
 const finalStakingBalance = await token.balanceOf(stakingAddress)
-assertBn(finalUserBalance, initialUserBalance.sub(DEFAULT_STAKE_AMOUNT), "user balance should match")
-assertBn(finalStakingBalance, initialStakingBalance.add(DEFAULT_STAKE_AMOUNT), "Staking app balance should match")
-assertBn(await staking.totalStakedFor(user), DEFAULT_STAKE_AMOUNT, "staked value should match")
+assertBn(finalUserBalance, initialUserBalance.sub(DEFAULT_STAKE_AMOUNT), 'user balance should match')
+assertBn(finalStakingBalance, initialStakingBalance.add(DEFAULT_STAKE_AMOUNT), 'Staking app balance should match')
+assertBn(await staking.totalStakedFor(user), DEFAULT_STAKE_AMOUNT, 'staked value should match')
 // total stake
-assertBn(await staking.totalStaked(), DEFAULT_STAKE_AMOUNT, "Total stake should match")
+assertBn(await staking.totalStaked(), DEFAULT_STAKE_AMOUNT, 'Total stake should match')
 })
 it('fails staking 0 amount through approveAndCall', async () => {
-await assertRevert(token.approveAndCall(stakingAddress, 0, EMPTY_DATA, { from: user })/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO*/)
+await assertRevert(token.approveAndCall(stakingAddress, 0, EMPTY_DATA, { from: user })/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO */)
 })
 it('fails calling approveAndCall on a different token', async () => {
 const token2 = await MiniMeToken.new(ZERO_ADDRESS, ZERO_ADDRESS, 0, 'Test Token 2', 18, 'TT2', true)
 await token2.generateTokens(user, DEFAULT_STAKE_AMOUNT)
-await assertRevert(token2.approveAndCall(stakingAddress, 0, EMPTY_DATA, { from: user })/*, STAKING_ERRORS.ERROR_WRONG_TOKEN*/)
+await assertRevert(token2.approveAndCall(stakingAddress, 0, EMPTY_DATA, { from: user })/*, STAKING_ERRORS.ERROR_WRONG_TOKEN */)
 })
 it('fails calling receiveApproval from a different account than the token', async () => {
-await assertRevert(staking.receiveApproval(user, DEFAULT_STAKE_AMOUNT, tokenAddress, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_SENDER*/)
+await assertRevert(staking.receiveApproval(user, DEFAULT_STAKE_AMOUNT, tokenAddress, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_SENDER */)
 })
 })


@@ -1,6 +1,6 @@
 const CHECKPOINT_ERRORS = {
 ERROR_VALUE_TOO_BIG: 'CHECKPOINT_VALUE_TOO_BIG',
-ERROR_CANNOT_ADD_PAST_VALUE: 'CHECKPOINT_CANNOT_ADD_PAST_VALUE',
+ERROR_CANNOT_ADD_PAST_VALUE: 'CHECKPOINT_CANNOT_ADD_PAST_VALUE'
 }
 const STAKING_ERRORS = {

@@ -20,16 +20,16 @@ const STAKING_ERRORS = {
 ERROR_CANNOT_UNLOCK: 'STAKING_CANNOT_UNLOCK',
 ERROR_CANNOT_CHANGE_ALLOWANCE: 'STAKING_CANNOT_CHANGE_ALLOWANCE',
 ERROR_LOCKMANAGER_CALL_FAIL: 'STAKING_LOCKMANAGER_CALL_FAIL',
-ERROR_BLOCKNUMBER_TOO_BIG: 'STAKING_BLOCKNUMBER_TOO_BIG',
+ERROR_BLOCKNUMBER_TOO_BIG: 'STAKING_BLOCKNUMBER_TOO_BIG'
 }
 const TIME_LOCK_MANAGER_ERRORS = {
 ERROR_ALREADY_LOCKED: 'TLM_ALREADY_LOCKED',
-ERROR_WRONG_INTERVAL: 'TLM_WRONG_INTERVAL',
+ERROR_WRONG_INTERVAL: 'TLM_WRONG_INTERVAL'
 }
 module.exports = {
 CHECKPOINT_ERRORS,
 STAKING_ERRORS,
-TIME_LOCK_MANAGER_ERRORS,
+TIME_LOCK_MANAGER_ERRORS
 }


@ -31,7 +31,7 @@ module.exports = (artifacts) => {
// funds flows helpers // funds flows helpers
function UserState(address, walletBalance) { function UserState (address, walletBalance) {
this.address = address this.address = address
this.walletBalance = walletBalance this.walletBalance = walletBalance
this.stakedBalance = bn(0) this.stakedBalance = bn(0)
@ -230,7 +230,7 @@ module.exports = (artifacts) => {
*/ */
) )
await assertRevert( await assertRevert(
staking.slashAndUnstake(user.address, to, user.lockedBalance.add(bn(1)), { from: managers[i] }),/* staking.slashAndUnstake(user.address, to, user.lockedBalance.add(bn(1)), { from: managers[i] })/*
STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK
*/ */
) )
@ -239,12 +239,12 @@ module.exports = (artifacts) => {
const lockManagerAddress = managers[managers.length - 1] const lockManagerAddress = managers[managers.length - 1]
const lockManager = await LockManagerMock.at(lockManagerAddress) const lockManager = await LockManagerMock.at(lockManagerAddress)
await assertRevert( await assertRevert(
lockManager.slash(staking.address, user.address, to, user.lockedBalance.add(bn(1))),/* lockManager.slash(staking.address, user.address, to, user.lockedBalance.add(bn(1)))/*
STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK
*/ */
) )
await assertRevert( await assertRevert(
lockManager.slashAndUnstake(staking.address, user.address, to, user.lockedBalance.add(bn(1))),/* lockManager.slashAndUnstake(staking.address, user.address, to, user.lockedBalance.add(bn(1)))/*
STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK
*/ */
) )
@ -278,6 +278,6 @@ module.exports = (artifacts) => {
slashAndUnstakeWithState, slashAndUnstakeWithState,
slashFromContractWithState, slashFromContractWithState,
slashAndUnstakeFromContractWithState, slashAndUnstakeFromContractWithState,
checkInvariants, checkInvariants
} }
} }
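The funds-flow helpers above sit alongside the approveAndStake helper imported throughout the suites; its implementation is not part of this hunk, so the following is only a sketch of what it is assumed to do (the ERC20 artifact name is an assumption):

const approveAndStake = async ({ staking, amount = DEFAULT_STAKE_AMOUNT, from }) => {
  // approve the Staking app to pull the tokens, then stake them for `from`
  const token = await ERC20.at(await staking.token())
  await token.approve(staking.address, amount, { from })
  await staking.stake(amount, EMPTY_DATA, { from })
}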

View File

@ -31,7 +31,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is greater than zero', async () => { context('when the given time is greater than zero', async () => {
const time= bn(1) const time = bn(1)
it('adds the new value', async () => { it('adds the new value', async () => {
await checkpointing.add(time, value) await checkpointing.add(time, value)
@ -49,15 +49,15 @@ contract('Checkpointing', () => {
}) })
context('when the given time is previous to the latest registered value', async () => { context('when the given time is previous to the latest registered value', async () => {
const time= bn(40) const time = bn(40)
it('reverts', async () => { it('reverts', async () => {
await assertRevert(checkpointing.add(time, value)/*, CHECKPOINT_ERRORS.CANNOT_ADD_PAST_VALUE*/) await assertRevert(checkpointing.add(time, value)/*, CHECKPOINT_ERRORS.CANNOT_ADD_PAST_VALUE */)
}) })
}) })
context('when the given time is equal to the latest registered value', async () => { context('when the given time is equal to the latest registered value', async () => {
const time= bn(90) const time = bn(90)
it('updates the already registered value', async () => { it('updates the already registered value', async () => {
await checkpointing.add(time, value) await checkpointing.add(time, value)
@ -68,7 +68,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is after the latest registered value', async () => { context('when the given time is after the latest registered value', async () => {
const time= bn(95) const time = bn(95)
it('adds the new last value', async () => { it('adds the new last value', async () => {
const previousLast = await checkpointing.getLast() const previousLast = await checkpointing.getLast()
@ -87,7 +87,7 @@ contract('Checkpointing', () => {
const value = MAX_UINT256 const value = MAX_UINT256
it('reverts', async () => { it('reverts', async () => {
await assertRevert(checkpointing.add(0, value)/*, CHECKPOINT_ERRORS.VALUE_TOO_BIG*/) await assertRevert(checkpointing.add(0, value)/*, CHECKPOINT_ERRORS.VALUE_TOO_BIG */)
}) })
}) })
}) })
@ -135,7 +135,7 @@ contract('Checkpointing', () => {
describe('get', () => { describe('get', () => {
context('when there are no values registered yet', () => { context('when there are no values registered yet', () => {
context('when there given time is zero', () => { context('when there given time is zero', () => {
const time= bn(0) const time = bn(0)
it('returns zero', async () => { it('returns zero', async () => {
await assertFetchedValue(time, bn(0)) await assertFetchedValue(time, bn(0))
@ -143,7 +143,7 @@ contract('Checkpointing', () => {
}) })
context('when there given time is greater than zero', () => { context('when there given time is greater than zero', () => {
const time= bn(1) const time = bn(1)
it('returns zero', async () => { it('returns zero', async () => {
await assertFetchedValue(time, bn(0)) await assertFetchedValue(time, bn(0))
@ -159,7 +159,7 @@ contract('Checkpointing', () => {
}) })
context('when there given time is zero', () => { context('when there given time is zero', () => {
const time= bn(0) const time = bn(0)
it('returns zero', async () => { it('returns zero', async () => {
await assertFetchedValue(time, bn(0)) await assertFetchedValue(time, bn(0))
@ -167,7 +167,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is previous to the time of first registered value', () => { context('when the given time is previous to the time of first registered value', () => {
const time= bn(10) const time = bn(10)
it('returns zero', async () => { it('returns zero', async () => {
await assertFetchedValue(time, bn(0)) await assertFetchedValue(time, bn(0))
@ -175,7 +175,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is equal to the time of first registered value', () => { context('when the given time is equal to the time of first registered value', () => {
const time= bn(30) const time = bn(30)
it('returns the first registered value', async () => { it('returns the first registered value', async () => {
await assertFetchedValue(time, bn(1)) await assertFetchedValue(time, bn(1))
@ -183,7 +183,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is between the times of first and the second registered values', () => { context('when the given time is between the times of first and the second registered values', () => {
const time= bn(40) const time = bn(40)
it('returns the first registered value', async () => { it('returns the first registered value', async () => {
await assertFetchedValue(time, bn(1)) await assertFetchedValue(time, bn(1))
@ -191,7 +191,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is the time of the second registered values', () => { context('when the given time is the time of the second registered values', () => {
const time= bn(50) const time = bn(50)
it('returns the second registered value', async () => { it('returns the second registered value', async () => {
await assertFetchedValue(time, bn(2)) await assertFetchedValue(time, bn(2))
@ -199,7 +199,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is between the times of second and the third registered values', () => { context('when the given time is between the times of second and the third registered values', () => {
const time= bn(60) const time = bn(60)
it('returns the second registered value', async () => { it('returns the second registered value', async () => {
await assertFetchedValue(time, bn(2)) await assertFetchedValue(time, bn(2))
@ -207,7 +207,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is equal to the time of the third registered values', () => { context('when the given time is equal to the time of the third registered values', () => {
const time= bn(90) const time = bn(90)
it('returns the third registered value', async () => { it('returns the third registered value', async () => {
await assertFetchedValue(time, bn(3)) await assertFetchedValue(time, bn(3))
@ -215,7 +215,7 @@ contract('Checkpointing', () => {
}) })
context('when the given time is after the time of the third registered values', () => { context('when the given time is after the time of the third registered values', () => {
const time= bn(100) const time = bn(100)
it('returns the third registered value', async () => { it('returns the third registered value', async () => {
await assertFetchedValue(time, bn(3)) await assertFetchedValue(time, bn(3))

View File

@ -15,7 +15,7 @@ const {
slashAndUnstakeWithState, slashAndUnstakeWithState,
slashFromContractWithState, slashFromContractWithState,
slashAndUnstakeFromContractWithState, slashAndUnstakeFromContractWithState,
checkInvariants, checkInvariants
} = require('../helpers/helpers')(artifacts) } = require('../helpers/helpers')(artifacts)
const { DEFAULT_STAKE_AMOUNT, DEFAULT_LOCK_AMOUNT, EMPTY_DATA, ZERO_ADDRESS } = require('../helpers/constants') const { DEFAULT_STAKE_AMOUNT, DEFAULT_LOCK_AMOUNT, EMPTY_DATA, ZERO_ADDRESS } = require('../helpers/constants')
@ -108,12 +108,12 @@ contract('Staking app, Locking funds flows', ([_, owner, user1, user2, user3]) =
}) })
const unlockAndUnstake = async (unlockAmount) => { const unlockAndUnstake = async (unlockAmount) => {
await unlockWithState({ staking, managerAddress: lockManagerAddress, unlockAmount, user: users[0]}) await unlockWithState({ staking, managerAddress: lockManagerAddress, unlockAmount, user: users[0] })
await unstake(stakeAmount.sub(lockAmount.sub(unlockAmount))) await unstake(stakeAmount.sub(lockAmount.sub(unlockAmount)))
} }
const unlockAndUnstakeFromManager = async (unlockAmount) => { const unlockAndUnstakeFromManager = async (unlockAmount) => {
await unlockFromManagerWithState({ staking, lockManager, unlockAmount, user: users[0]}) await unlockFromManagerWithState({ staking, lockManager, unlockAmount, user: users[0] })
await unstake(stakeAmount.sub(lockAmount.sub(unlockAmount))) await unstake(stakeAmount.sub(lockAmount.sub(unlockAmount)))
} }

View File

@ -6,7 +6,7 @@ const { approveAndStake } = require('../helpers/helpers')(artifacts)
const { DEFAULT_STAKE_AMOUNT, DEFAULT_LOCK_AMOUNT, EMPTY_DATA } = require('../helpers/constants') const { DEFAULT_STAKE_AMOUNT, DEFAULT_LOCK_AMOUNT, EMPTY_DATA } = require('../helpers/constants')
const { STAKING_ERRORS, TIME_LOCK_MANAGER_ERRORS } = require('../helpers/errors') const { STAKING_ERRORS, TIME_LOCK_MANAGER_ERRORS } = require('../helpers/errors')
const TimeLockManagerMock = artifacts.require('TimeLockManagerMock'); const TimeLockManagerMock = artifacts.require('TimeLockManagerMock')
contract('Staking app, Time locking', ([owner]) => { contract('Staking app, Time locking', ([owner]) => {
let token, staking, manager let token, staking, manager
@ -17,7 +17,7 @@ contract('Staking app, Time locking', ([owner]) => {
const DEFAULT_TIME = 1000 const DEFAULT_TIME = 1000
const DEFAULT_BLOCKS = 10 const DEFAULT_BLOCKS = 10
const approveStakeAndLock = async(unit, start, end, lockAmount = DEFAULT_LOCK_AMOUNT, stakeAmount = DEFAULT_STAKE_AMOUNT) => { const approveStakeAndLock = async (unit, start, end, lockAmount = DEFAULT_LOCK_AMOUNT, stakeAmount = DEFAULT_STAKE_AMOUNT) => {
await approveAndStake({ staking, amount: stakeAmount, from: owner }) await approveAndStake({ staking, amount: stakeAmount, from: owner })
// allow manager // allow manager
await staking.allowManager(manager.address, lockAmount, EMPTY_DATA) await staking.allowManager(manager.address, lockAmount, EMPTY_DATA)
@ -40,23 +40,23 @@ contract('Staking app, Time locking', ([owner]) => {
// check lock values // check lock values
const { _amount, _allowance } = await staking.getLock(owner, manager.address) const { _amount, _allowance } = await staking.getLock(owner, manager.address)
assertBn(_amount, DEFAULT_LOCK_AMOUNT, "locked amount should match") assertBn(_amount, DEFAULT_LOCK_AMOUNT, 'locked amount should match')
assertBn(_allowance, DEFAULT_LOCK_AMOUNT, "locked allowance should match") assertBn(_allowance, DEFAULT_LOCK_AMOUNT, 'locked allowance should match')
// check time values // check time values
const { unit, start, end } = await manager.getTimeInterval(owner) const { unit, start, end } = await manager.getTimeInterval(owner)
assert.equal(unit.toString(), TIME_UNIT_SECONDS.toString(), "interval unit should match") assert.equal(unit.toString(), TIME_UNIT_SECONDS.toString(), 'interval unit should match')
assert.equal(start.toString(), startTime.toString(), "interval start should match") assert.equal(start.toString(), startTime.toString(), 'interval start should match')
assert.equal(end.toString(), endTime.toString(), "interval end should match") assert.equal(end.toString(), endTime.toString(), 'interval end should match')
// can not unlock // can not unlock
assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), false, "Shouldn't be able to unlock") assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), false, "Shouldn't be able to unlock")
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), "Unlocked balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), 'Unlocked balance should match')
await manager.setTimestamp(endTime.add(bn(1))) await manager.setTimestamp(endTime.add(bn(1)))
// can unlock // can unlock
assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), true, "Should be able to unlock") assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), true, 'Should be able to unlock')
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), "Unlocked balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), 'Unlocked balance should match')
}) })
it('locks using blocks', async () => { it('locks using blocks', async () => {
@ -66,22 +66,22 @@ contract('Staking app, Time locking', ([owner]) => {
// check lock values // check lock values
const { _amount, _allowance } = await staking.getLock(owner, manager.address) const { _amount, _allowance } = await staking.getLock(owner, manager.address)
assertBn(_amount, DEFAULT_LOCK_AMOUNT, "locked amount should match") assertBn(_amount, DEFAULT_LOCK_AMOUNT, 'locked amount should match')
assertBn(_allowance, DEFAULT_LOCK_AMOUNT, "locked allowance should match") assertBn(_allowance, DEFAULT_LOCK_AMOUNT, 'locked allowance should match')
// check time values // check time values
const { unit, start, end } = await manager.getTimeInterval(owner) const { unit, start, end } = await manager.getTimeInterval(owner)
assert.equal(unit.toString(), TIME_UNIT_BLOCKS.toString(), "interval unit should match") assert.equal(unit.toString(), TIME_UNIT_BLOCKS.toString(), 'interval unit should match')
assert.equal(start.toString(), startBlock.toString(), "interval start should match") assert.equal(start.toString(), startBlock.toString(), 'interval start should match')
assert.equal(end.toString(), endBlock.toString(), "interval end should match") assert.equal(end.toString(), endBlock.toString(), 'interval end should match')
// can not unlock // can not unlock
assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), false, "Shouldn't be able to unlock") assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), false, "Shouldn't be able to unlock")
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), "Unlocked balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), 'Unlocked balance should match')
await manager.setBlockNumber(endBlock.add(bn(1))) await manager.setBlockNumber(endBlock.add(bn(1)))
// can unlock // can unlock
assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), true, "Should be able to unlock") assert.equal(await staking.canUnlock(owner, owner, manager.address, 0), true, 'Should be able to unlock')
}) })
it('fails to unlock if can not unlock', async () => { it('fails to unlock if can not unlock', async () => {
@ -90,7 +90,7 @@ contract('Staking app, Time locking', ([owner]) => {
await approveStakeAndLock(TIME_UNIT_SECONDS, startTime, endTime) await approveStakeAndLock(TIME_UNIT_SECONDS, startTime, endTime)
// tries to unlock // tries to unlock
await assertRevert(staking.unlockAndRemoveManager(owner, manager.address)/*, STAKING_ERRORS.ERROR_CANNOT_UNLOCK*/) await assertRevert(staking.unlockAndRemoveManager(owner, manager.address)/*, STAKING_ERRORS.ERROR_CANNOT_UNLOCK */)
}) })
it('fails trying to lock twice', async () => { it('fails trying to lock twice', async () => {
@ -98,10 +98,9 @@ contract('Staking app, Time locking', ([owner]) => {
const endTime = startTime.add(bn(DEFAULT_TIME)) const endTime = startTime.add(bn(DEFAULT_TIME))
await approveStakeAndLock(TIME_UNIT_SECONDS, startTime, endTime) await approveStakeAndLock(TIME_UNIT_SECONDS, startTime, endTime)
await assertRevert(manager.lock(staking.address, owner, DEFAULT_LOCK_AMOUNT, TIME_UNIT_SECONDS, startTime, endTime)/*, TIME_LOCK_MANAGER_ERRORS.ERROR_ALREADY_LOCKED*/) await assertRevert(manager.lock(staking.address, owner, DEFAULT_LOCK_AMOUNT, TIME_UNIT_SECONDS, startTime, endTime)/*, TIME_LOCK_MANAGER_ERRORS.ERROR_ALREADY_LOCKED */)
}) })
it('fails trying to lock with wrong interval', async () => { it('fails trying to lock with wrong interval', async () => {
const startTime = await manager.getTimestampExt() const startTime = await manager.getTimestampExt()
const endTime = startTime.add(bn(DEFAULT_TIME)) const endTime = startTime.add(bn(DEFAULT_TIME))
@ -110,6 +109,6 @@ contract('Staking app, Time locking', ([owner]) => {
// allow manager // allow manager
await staking.allowManager(manager.address, DEFAULT_STAKE_AMOUNT, EMPTY_DATA) await staking.allowManager(manager.address, DEFAULT_STAKE_AMOUNT, EMPTY_DATA)
// start and end times are deliberately swapped // start and end times are deliberately swapped
await assertRevert(manager.lock(staking.address, owner, DEFAULT_LOCK_AMOUNT, TIME_UNIT_SECONDS, endTime, startTime)/*, TIME_LOCK_MANAGER_ERRORS.ERROR_WRONG_INTERVAL*/) await assertRevert(manager.lock(staking.address, owner, DEFAULT_LOCK_AMOUNT, TIME_UNIT_SECONDS, endTime, startTime)/*, TIME_LOCK_MANAGER_ERRORS.ERROR_WRONG_INTERVAL */)
}) })
}) })

View File

@ -7,7 +7,7 @@ const { DEFAULT_STAKE_AMOUNT, EMPTY_DATA } = require('./helpers/constants')
const { STAKING_ERRORS } = require('./helpers/errors') const { STAKING_ERRORS } = require('./helpers/errors')
const StakingMock = artifacts.require('StakingMock') const StakingMock = artifacts.require('StakingMock')
const StandardTokenMock = artifacts.require('StandardTokenMock'); const StandardTokenMock = artifacts.require('StandardTokenMock')
const BadTokenMock = artifacts.require('BadTokenMock') const BadTokenMock = artifacts.require('BadTokenMock')
const getTokenBalance = async (token, account) => await token.balanceOf(account) const getTokenBalance = async (token, account) => await token.balanceOf(account)
@ -27,13 +27,13 @@ contract('Staking app', ([owner, other]) => {
}) })
it('has correct initial state', async () => { it('has correct initial state', async () => {
assert.equal(await staking.token(), tokenAddress, "Token is wrong") assert.equal(await staking.token(), tokenAddress, 'Token is wrong')
assert.equal((await staking.totalStaked()).valueOf(), 0, "Initial total staked amount should be zero") assert.equal((await staking.totalStaked()).valueOf(), 0, 'Initial total staked amount should be zero')
assert.equal(await staking.supportsHistory(), true, "history support should match") assert.equal(await staking.supportsHistory(), true, 'history support should match')
}) })
it('fails deploying if token is not a contract', async() => { it('fails deploying if token is not a contract', async () => {
await assertRevert(StakingMock.new(owner)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT*/) await assertRevert(StakingMock.new(owner)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT */)
}) })
it('stakes', async () => { it('stakes', async () => {
@ -44,23 +44,23 @@ contract('Staking app', ([owner, other]) => {
const finalOwnerBalance = await getTokenBalance(token, owner) const finalOwnerBalance = await getTokenBalance(token, owner)
const finalStakingBalance = await getTokenBalance(token, stakingAddress) const finalStakingBalance = await getTokenBalance(token, stakingAddress)
assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT)), "owner balance should match") assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT)), 'owner balance should match')
assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT)), "Staking app balance should match") assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT)), 'Staking app balance should match')
assertBn(await staking.totalStakedFor(owner), bn(DEFAULT_STAKE_AMOUNT), "staked value should match") assertBn(await staking.totalStakedFor(owner), bn(DEFAULT_STAKE_AMOUNT), 'staked value should match')
// total stake // total stake
assertBn(await staking.totalStaked(), bn(DEFAULT_STAKE_AMOUNT), "Total stake should match") assertBn(await staking.totalStaked(), bn(DEFAULT_STAKE_AMOUNT), 'Total stake should match')
}) })
it('fails staking 0 amount', async () => { it('fails staking 0 amount', async () => {
await token.approve(stakingAddress, 1) await token.approve(stakingAddress, 1)
await assertRevert(staking.stake(0, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO*/) await assertRevert(staking.stake(0, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO */)
}) })
it('fails staking more than balance', async () => { it('fails staking more than balance', async () => {
const balance = await getTokenBalance(token, owner) const balance = await getTokenBalance(token, owner)
const amount = balance.add(bn(1)) const amount = balance.add(bn(1))
await token.approve(stakingAddress, amount) await token.approve(stakingAddress, amount)
await assertRevert(staking.stake(amount, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_DEPOSIT*/) await assertRevert(staking.stake(amount, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_DEPOSIT */)
}) })
it('stakes for', async () => { it('stakes for', async () => {
@ -76,11 +76,11 @@ contract('Staking app', ([owner, other]) => {
const finalOwnerBalance = await getTokenBalance(token, owner) const finalOwnerBalance = await getTokenBalance(token, owner)
const finalOtherBalance = await getTokenBalance(token, other) const finalOtherBalance = await getTokenBalance(token, other)
const finalStakingBalance = await getTokenBalance(token, stakingAddress) const finalStakingBalance = await getTokenBalance(token, stakingAddress)
assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT)), "owner balance should match") assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT)), 'owner balance should match')
assertBn(finalOtherBalance, initialOtherBalance, "other balance should match") assertBn(finalOtherBalance, initialOtherBalance, 'other balance should match')
assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT)), "Staking app balance should match") assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT)), 'Staking app balance should match')
assertBn(await staking.totalStakedFor(owner), bn(0), "staked value for owner should match") assertBn(await staking.totalStakedFor(owner), bn(0), 'staked value for owner should match')
assertBn(await staking.totalStakedFor(other), bn(DEFAULT_STAKE_AMOUNT), "staked value for other should match") assertBn(await staking.totalStakedFor(other), bn(DEFAULT_STAKE_AMOUNT), 'staked value for other should match')
}) })
it('unstakes', async () => { it('unstakes', async () => {
@ -94,24 +94,24 @@ contract('Staking app', ([owner, other]) => {
const finalOwnerBalance = await getTokenBalance(token, owner) const finalOwnerBalance = await getTokenBalance(token, owner)
const finalStakingBalance = await getTokenBalance(token, stakingAddress) const finalStakingBalance = await getTokenBalance(token, stakingAddress)
assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT.div(bn(2)))), "owner balance should match") assertBn(finalOwnerBalance, initialOwnerBalance.sub(bn(DEFAULT_STAKE_AMOUNT.div(bn(2)))), 'owner balance should match')
assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT.div(bn(2)))), "Staking app balance should match") assertBn(finalStakingBalance, initialStakingBalance.add(bn(DEFAULT_STAKE_AMOUNT.div(bn(2)))), 'Staking app balance should match')
assertBn(await staking.totalStakedFor(owner), bn(DEFAULT_STAKE_AMOUNT.div(bn(2))), "staked value should match") assertBn(await staking.totalStakedFor(owner), bn(DEFAULT_STAKE_AMOUNT.div(bn(2))), 'staked value should match')
}) })
it('fails unstaking 0 amount', async () => { it('fails unstaking 0 amount', async () => {
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
await assertRevert(staking.unstake(0, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO*/) await assertRevert(staking.unstake(0, EMPTY_DATA)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO */)
}) })
it('fails unstaking more than staked', async () => { it('fails unstaking more than staked', async () => {
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
await assertRevert(staking.unstake(DEFAULT_STAKE_AMOUNT.add(bn(1)), EMPTY_DATA)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE*/) await assertRevert(staking.unstake(DEFAULT_STAKE_AMOUNT.add(bn(1)), EMPTY_DATA)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE */)
}) })
context('History', async () => { context('History', async () => {
it('supports history', async () => { it('supports history', async () => {
assert.equal(await staking.supportsHistory(), true, "It should support History") assert.equal(await staking.supportsHistory(), true, 'It should support History')
}) })
it('has correct "last staked for"', async () => { it('has correct "last staked for"', async () => {
@ -119,7 +119,7 @@ contract('Staking app', ([owner, other]) => {
const lastStaked = blockNumber.add(bn(5)) const lastStaked = blockNumber.add(bn(5))
await staking.setBlockNumber(lastStaked) await staking.setBlockNumber(lastStaked)
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
assertBn(await staking.lastStakedFor(owner), lastStaked, "Last staked for should match") assertBn(await staking.lastStakedFor(owner), lastStaked, 'Last staked for should match')
}) })
it('has correct "total staked for at"', async () => { it('has correct "total staked for at"', async () => {
@ -127,8 +127,8 @@ contract('Staking app', ([owner, other]) => {
const lastStaked = beforeBlockNumber.add(bn(5)) const lastStaked = beforeBlockNumber.add(bn(5))
await staking.setBlockNumber(lastStaked) await staking.setBlockNumber(lastStaked)
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
assertBn(await staking.totalStakedForAt(owner, beforeBlockNumber), bn(0), "Staked for at before staking should match") assertBn(await staking.totalStakedForAt(owner, beforeBlockNumber), bn(0), 'Staked for at before staking should match')
assertBn(await staking.totalStakedForAt(owner, lastStaked), bn(DEFAULT_STAKE_AMOUNT), "Staked for after staking should match") assertBn(await staking.totalStakedForAt(owner, lastStaked), bn(DEFAULT_STAKE_AMOUNT), 'Staked for after staking should match')
}) })
it('has correct "total staked at"', async () => { it('has correct "total staked at"', async () => {
@ -137,16 +137,16 @@ contract('Staking app', ([owner, other]) => {
await staking.setBlockNumber(lastStaked) await staking.setBlockNumber(lastStaked)
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
await approveAndStake({ staking, from: other }) await approveAndStake({ staking, from: other })
assertBn(await staking.totalStakedAt(beforeBlockNumber), bn(0), "Staked for at before should match") assertBn(await staking.totalStakedAt(beforeBlockNumber), bn(0), 'Staked for at before should match')
assertBn(await staking.totalStakedAt(lastStaked), bn(DEFAULT_STAKE_AMOUNT.mul(bn(2))), "Staked for at after staking should match") assertBn(await staking.totalStakedAt(lastStaked), bn(DEFAULT_STAKE_AMOUNT.mul(bn(2))), 'Staked for at after staking should match')
}) })
it('fails to call totalStakedForAt with block number greater than max uint64', async () => { it('fails to call totalStakedForAt with block number greater than max uint64', async () => {
await assertRevert(staking.totalStakedForAt(owner, MAX_UINT64.add(bn(1)))/*, STAKING_ERRORS.ERROR_BLOCKNUMBER_TOO_BIG*/) await assertRevert(staking.totalStakedForAt(owner, MAX_UINT64.add(bn(1)))/*, STAKING_ERRORS.ERROR_BLOCKNUMBER_TOO_BIG */)
}) })
it('fails to call totalStakedAt with block number greater than max uint64', async () => { it('fails to call totalStakedAt with block number greater than max uint64', async () => {
await assertRevert(staking.totalStakedAt(MAX_UINT64.add(bn(1)))/*, STAKING_ERRORS.ERROR_BLOCKNUMBER_TOO_BIG*/) await assertRevert(staking.totalStakedAt(MAX_UINT64.add(bn(1)))/*, STAKING_ERRORS.ERROR_BLOCKNUMBER_TOO_BIG */)
}) })
}) })
@ -170,7 +170,7 @@ contract('Staking app', ([owner, other]) => {
await badStaking.stake(DEFAULT_STAKE_AMOUNT, EMPTY_DATA, { from: owner }) await badStaking.stake(DEFAULT_STAKE_AMOUNT, EMPTY_DATA, { from: owner })
// unstake half of them, fails on token transfer // unstake half of them, fails on token transfer
await assertRevert(badStaking.unstake(DEFAULT_STAKE_AMOUNT.div(bn(2)), EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_TRANSFER*/) await assertRevert(badStaking.unstake(DEFAULT_STAKE_AMOUNT.div(bn(2)), EMPTY_DATA)/*, STAKING_ERRORS.ERROR_TOKEN_TRANSFER */)
}) })
}) })
}) })

View File

@ -85,7 +85,7 @@ contract('StakingFactory', ([_, owner, someone]) => {
const tokenAddress = ZERO_ADDRESS const tokenAddress = ZERO_ADDRESS
it('reverts', async () => { it('reverts', async () => {
await assertRevert(factory.getOrCreateInstance(tokenAddress)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT*/) await assertRevert(factory.getOrCreateInstance(tokenAddress)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT */)
}) })
}) })
@ -93,7 +93,7 @@ contract('StakingFactory', ([_, owner, someone]) => {
const tokenAddress = someone const tokenAddress = someone
it('reverts', async () => { it('reverts', async () => {
await assertRevert(factory.getOrCreateInstance(tokenAddress)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT*/) await assertRevert(factory.getOrCreateInstance(tokenAddress)/*, STAKING_ERRORS.ERROR_TOKEN_NOT_CONTRACT */)
}) })
}) })
}) })

View File

@ -17,42 +17,40 @@ contract('Staking app, Transferring', ([owner, user1, user2]) => {
}) })
context('Transfers', async () => { context('Transfers', async () => {
context('From stake', async () => { context('From stake', async () => {
const transfersFromStake = (transferType) => { const transfersFromStake = (transferType) => {
it('transfers', async () => { it('transfers', async () => {
//const initialTotalStake = await staking.totalStaked() // const initialTotalStake = await staking.totalStaked()
const transferAmount = DEFAULT_STAKE_AMOUNT.div(bn(2)) const transferAmount = DEFAULT_STAKE_AMOUNT.div(bn(2))
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
await staking[transferType](user1, transferAmount) await staking[transferType](user1, transferAmount)
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(transferAmount), "Owner balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(transferAmount), 'Owner balance should match')
const userStakedBalance = transferType == 'transfer' ? transferAmount : bn(0) const userStakedBalance = transferType == 'transfer' ? transferAmount : bn(0)
assertBn(await staking.unlockedBalanceOf(user1), userStakedBalance, "User 1 unlocked balance should match") assertBn(await staking.unlockedBalanceOf(user1), userStakedBalance, 'User 1 unlocked balance should match')
const userExternalBalance = transferType == 'transfer' ? bn(0) : transferAmount const userExternalBalance = transferType == 'transfer' ? bn(0) : transferAmount
assertBn(await token.balanceOf(user1), userExternalBalance, "User 1 external balance should match") assertBn(await token.balanceOf(user1), userExternalBalance, 'User 1 external balance should match')
// total stake // total stake
const totalStaked = transferType == 'transfer' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(transferAmount) const totalStaked = transferType == 'transfer' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(transferAmount)
assertBn(await staking.totalStaked(), totalStaked, "Total stake should match") assertBn(await staking.totalStaked(), totalStaked, 'Total stake should match')
}) })
it('fails transferring zero tokens', async () => { it('fails transferring zero tokens', async () => {
await approveAndStake({ staking, from: owner }) await approveAndStake({ staking, from: owner })
await assertRevert(staking[transferType](user1, 0)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO*/) await assertRevert(staking[transferType](user1, 0)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO */)
}) })
it('fails transferring more than staked balance', async () => { it('fails transferring more than staked balance', async () => {
await approveAndStake({ staking, amount: DEFAULT_STAKE_AMOUNT, from: owner }) await approveAndStake({ staking, amount: DEFAULT_STAKE_AMOUNT, from: owner })
await assertRevert(staking[transferType](user1, DEFAULT_STAKE_AMOUNT.add(bn(1)))/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE*/) await assertRevert(staking[transferType](user1, DEFAULT_STAKE_AMOUNT.add(bn(1)))/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE */)
}) })
it('fails transferring more than unlocked balance', async () => { it('fails transferring more than unlocked balance', async () => {
await approveStakeAndLock({ staking, manager: lockManager.address, from: owner }) await approveStakeAndLock({ staking, manager: lockManager.address, from: owner })
await assertRevert(staking[transferType](user1, DEFAULT_STAKE_AMOUNT)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE*/) await assertRevert(staking[transferType](user1, DEFAULT_STAKE_AMOUNT)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_BALANCE */)
}) })
} }
@ -71,62 +69,62 @@ contract('Staking app, Transferring', ([owner, user1, user2]) => {
const transferAmount = DEFAULT_LOCK_AMOUNT.div(bn(2)) const transferAmount = DEFAULT_LOCK_AMOUNT.div(bn(2))
await lockManager[transferType](staking.address, owner, user1, transferAmount) await lockManager[transferType](staking.address, owner, user1, transferAmount)
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), "Owner balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), 'Owner balance should match')
const userUnlockedBalance = transferType == 'slash' ? transferAmount : bn(0) const userUnlockedBalance = transferType == 'slash' ? transferAmount : bn(0)
assertBn(await staking.unlockedBalanceOf(user1), userUnlockedBalance, "User 1 unlocked balance should match") assertBn(await staking.unlockedBalanceOf(user1), userUnlockedBalance, 'User 1 unlocked balance should match')
const userExternalBalance = transferType == 'slash' ? bn(0) : transferAmount const userExternalBalance = transferType == 'slash' ? bn(0) : transferAmount
assertBn(await token.balanceOf(user1), userExternalBalance, "User 1 external balance should match") assertBn(await token.balanceOf(user1), userExternalBalance, 'User 1 external balance should match')
// total stake // total stake
const totalStaked = transferType == 'slash' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(transferAmount) const totalStaked = transferType == 'slash' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(transferAmount)
assertBn(await staking.totalStaked(), totalStaked, "Total stake should match") assertBn(await staking.totalStaked(), totalStaked, 'Total stake should match')
// check lock values // check lock values
const { _amount: amount, _data: data } = await staking.getLock(owner, lockManager.address) const { _amount: amount, _data: data } = await staking.getLock(owner, lockManager.address)
assertBn(amount, DEFAULT_LOCK_AMOUNT.sub(transferAmount), "locked amount should match") assertBn(amount, DEFAULT_LOCK_AMOUNT.sub(transferAmount), 'locked amount should match')
}) })
it('transfers the whole lock amount', async () => { it('transfers the whole lock amount', async () => {
await approveStakeAndLock({ staking, manager: lockManager.address, from: owner }) await approveStakeAndLock({ staking, manager: lockManager.address, from: owner })
await lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT) await lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT)
assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), "Owner balance should match") assertBn(await staking.unlockedBalanceOf(owner), DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT), 'Owner balance should match')
const userUnlockedBalance = transferType == 'slash' ? DEFAULT_LOCK_AMOUNT : bn(0) const userUnlockedBalance = transferType == 'slash' ? DEFAULT_LOCK_AMOUNT : bn(0)
assertBn(await staking.unlockedBalanceOf(user1), userUnlockedBalance, "User 1 unlocked balance should match") assertBn(await staking.unlockedBalanceOf(user1), userUnlockedBalance, 'User 1 unlocked balance should match')
const userExternalBalance = transferType == 'slash' ? bn(0) : DEFAULT_LOCK_AMOUNT const userExternalBalance = transferType == 'slash' ? bn(0) : DEFAULT_LOCK_AMOUNT
assertBn(await token.balanceOf(user1), userExternalBalance, "User 1 external balance should match") assertBn(await token.balanceOf(user1), userExternalBalance, 'User 1 external balance should match')
// total stake // total stake
const totalStaked = transferType == 'slash' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT) const totalStaked = transferType == 'slash' ? DEFAULT_STAKE_AMOUNT : DEFAULT_STAKE_AMOUNT.sub(DEFAULT_LOCK_AMOUNT)
assertBn(await staking.totalStaked(), totalStaked, "Total stake should match") assertBn(await staking.totalStaked(), totalStaked, 'Total stake should match')
// check lock values // check lock values
const { _amount: amount, _data: data } = await staking.getLock(owner, lockManager.address) const { _amount: amount, _data: data } = await staking.getLock(owner, lockManager.address)
assertBn(amount, bn(0), "locked amount should match") assertBn(amount, bn(0), 'locked amount should match')
}) })
it('fails transferring zero tokens', async () => { it('fails transferring zero tokens', async () => {
await approveStakeAndLock({ staking, manager: lockManager.address, from: owner }) await approveStakeAndLock({ staking, manager: lockManager.address, from: owner })
await assertRevert(lockManager[transferType](staking.address, owner, user1, 0)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO*/) await assertRevert(lockManager[transferType](staking.address, owner, user1, 0)/*, STAKING_ERRORS.ERROR_AMOUNT_ZERO */)
}) })
it('fails transferring more than locked balance', async () => { it('fails transferring more than locked balance', async () => {
await approveStakeAndLock({ staking, manager: lockManager.address, from: owner }) await approveStakeAndLock({ staking, manager: lockManager.address, from: owner })
await assertRevert(lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT.add(bn(1)))/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK*/) await assertRevert(lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT.add(bn(1)))/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK */)
}) })
it('fails if sender is not manager', async () => { it('fails if sender is not manager', async () => {
await approveStakeAndLock({ staking, manager: user1, from: owner }) await approveStakeAndLock({ staking, manager: user1, from: owner })
await assertRevert(lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK*/) await assertRevert(lockManager[transferType](staking.address, owner, user1, DEFAULT_LOCK_AMOUNT)/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK */)
}) })
it('fails transferring from unlocked lock', async () => { it('fails transferring from unlocked lock', async () => {
await approveStakeAndLock({ staking, manager: lockManager.address, from: owner }) await approveStakeAndLock({ staking, manager: lockManager.address, from: owner })
// unlock // unlock
await lockManager.unlockAndRemoveManager(staking.address, owner) await lockManager.unlockAndRemoveManager(staking.address, owner)
await assertRevert(lockManager[transferType](staking.address, owner, user2, DEFAULT_LOCK_AMOUNT, { from: user1 })/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK*/) await assertRevert(lockManager[transferType](staking.address, owner, user2, DEFAULT_LOCK_AMOUNT, { from: user1 })/*, STAKING_ERRORS.ERROR_NOT_ENOUGH_LOCK */)
}) })
} }

View File

@ -2,22 +2,22 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 7000000, // Gas sent with each transaction gas: 7000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.5.17", // A version or constraint - Ex. "^0.5.0". version: '0.5.17', // A version or constraint - Ex. "^0.5.0".
settings: { settings: {
optimizer: { optimizer: {
enabled: true, enabled: true,
runs: 10000, runs: 10000
}, }
}, }
}, }
}, }
} }
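Assuming the hunk above is the suite's truffle-config.js (file names are not shown in this view), the ethermint network it defines is selected by name, e.g. with `npx truffle test --network ethermint`; the same settings can also be read back programmatically:

// minimal sketch: inspect the network block defined above (require path assumed)
const config = require('./truffle-config')
const { host, port, gasPrice } = config.networks.ethermint
console.log(`ethermint RPC at http://${host}:${port}, gasPrice ${gasPrice} wei`)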

View File

@ -1,30 +1,27 @@
const Storage = artifacts.require('Storage'); const Storage = artifacts.require('Storage')
contract('Test Storage Contract', async function (accounts) { contract('Test Storage Contract', async function (accounts) {
let storageInstance
let storageInstance;
before(function () { before(function () {
console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`); console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`)
console.log('==========================\n'); console.log('==========================\n')
}) })
it('should deploy Storage contract', async function () { it('should deploy Storage contract', async function () {
storageInstance = await Storage.new(); storageInstance = await Storage.new()
console.log(`Deployed Storage at: ${storageInstance.address}`); console.log(`Deployed Storage at: ${storageInstance.address}`)
expect(storageInstance.address).not.to.be.undefined; expect(storageInstance.address).not.to.be.undefined
}); })
it('should successfully store a value', async function () { it('should successfully store a value', async function () {
const tx = await storageInstance.store(888); const tx = await storageInstance.store(888)
console.log(`Stored value 888 by tx: ${tx.tx}`); console.log(`Stored value 888 by tx: ${tx.tx}`)
expect(tx.tx).not.to.be.undefined; expect(tx.tx).not.to.be.undefined
}); })
it('should successfully retrieve a value', async function () { it('should successfully retrieve a value', async function () {
const value = await storageInstance.retrieve(); const value = await storageInstance.retrieve()
expect(value.toString()).to.equal('888'); expect(value.toString()).to.equal('888')
}); })
}) })

View File

@ -1,34 +1,31 @@
const Storage = artifacts.require('Storage'); const Storage = artifacts.require('Storage')
async function expectRevert(promise) { async function expectRevert (promise) {
try { try {
await promise; await promise
} catch (error) { } catch (error) {
if (error.message.indexOf('revert') === -1) { if (error.message.indexOf('revert') === -1) {
expect('revert').to.equal(error.message, 'Wrong kind of exception received'); expect('revert').to.equal(error.message, 'Wrong kind of exception received')
} }
return; return
} }
expect.fail('Expected an exception but none was received'); expect.fail('Expected an exception but none was received')
} }
contract('Test EVM Revert', async function (accounts) { contract('Test EVM Revert', async function (accounts) {
before(function () { before(function () {
console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`); console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`)
console.log('==========================\n'); console.log('==========================\n')
}) })
let storageInstance; let storageInstance
it('should deploy Storage contract', async function () { it('should deploy Storage contract', async function () {
storageInstance = await Storage.new(); storageInstance = await Storage.new()
console.log(`Deployed Storage at: ${storageInstance.address}`); console.log(`Deployed Storage at: ${storageInstance.address}`)
expect(storageInstance.address).not.to.be.undefined; expect(storageInstance.address).not.to.be.undefined
}); })
it('should revert when call `shouldRevert()`', async function () { it('should revert when call `shouldRevert()`', async function () {
await expectRevert(storageInstance.shouldRevert()); await expectRevert(storageInstance.shouldRevert())
}); })
}) })

View File

@ -1,33 +1,30 @@
const EventTest = artifacts.require('EventTest'); const EventTest = artifacts.require('EventTest')
const truffleAssert = require('truffle-assertions'); const truffleAssert = require('truffle-assertions')
contract('Test EventTest Contract', async function (accounts) { contract('Test EventTest Contract', async function (accounts) {
let eventInstance
let eventInstance;
before(function () { before(function () {
console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`); console.log(`Using Accounts (${accounts.length}): \n${accounts.join('\n')}`)
console.log('==========================\n'); console.log('==========================\n')
}) })
it('should deploy EventTest contract', async function () { it('should deploy EventTest contract', async function () {
eventInstance = await EventTest.new(); eventInstance = await EventTest.new()
console.log(`Deployed EventTest at: ${eventInstance.address}`); console.log(`Deployed EventTest at: ${eventInstance.address}`)
expect(eventInstance.address).not.to.be.undefined; expect(eventInstance.address).not.to.be.undefined
}); })
it('should emit events', async function () { it('should emit events', async function () {
const tx = await eventInstance.storeWithEvent(888); const tx = await eventInstance.storeWithEvent(888)
truffleAssert.eventEmitted(tx, 'ValueStored1', events => { truffleAssert.eventEmitted(tx, 'ValueStored1', events => {
return events['0'].toString() === '888'; return events['0'].toString() === '888'
}); })
truffleAssert.eventEmitted(tx, 'ValueStored2', events => { truffleAssert.eventEmitted(tx, 'ValueStored2', events => {
return events['0'].toString() === 'TestMsg' && events['1'].toString() === '888'; return events['0'].toString() === 'TestMsg' && events['1'].toString() === '888'
}); })
truffleAssert.eventEmitted(tx, 'ValueStored3', events => { truffleAssert.eventEmitted(tx, 'ValueStored3', events => {
return events['0'].toString() === 'TestMsg' && events['1'].toString() === '888' && events['2'].toString() === '888'; return events['0'].toString() === 'TestMsg' && events['1'].toString() === '888' && events['2'].toString() === '888'
}); })
})
});
}) })

View File

@ -2,16 +2,16 @@ module.exports = {
networks: { networks: {
// Development network is just left as truffle's default settings // Development network is just left as truffle's default settings
ethermint: { ethermint: {
host: "127.0.0.1", // Localhost (default: none) host: '127.0.0.1', // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none) port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none) network_id: '*', // Any network (default: none)
gas: 5000000, // Gas sent with each transaction gas: 5000000, // Gas sent with each transaction
gasPrice: 1000000000, // 1 gwei (in wei) gasPrice: 1000000000 // 1 gwei (in wei)
}, }
}, },
compilers: { compilers: {
solc: { solc: {
version: "0.8.3", version: '0.8.3'
}, }
}, }
} }

View File

@ -1,23 +1,21 @@
const fs = require('fs'); const fs = require('fs')
const path = require('path'); const path = require('path')
const { exec, spawn } = require('child_process'); const { exec, spawn } = require('child_process')
const yargs = require('yargs/yargs') const yargs = require('yargs/yargs')
const { hideBin } = require('yargs/helpers') const { hideBin } = require('yargs/helpers')
const logger = { const logger = {
warn: msg => console.error(`WARN: ${msg}`), warn: msg => console.error(`WARN: ${msg}`),
err: msg => console.error(`ERR: ${msg}`), err: msg => console.error(`ERR: ${msg}`),
info: msg => console.log(`INFO: ${msg}`) info: msg => console.log(`INFO: ${msg}`)
} }
function panic(errMsg) { function panic (errMsg) {
logger.err(errMsg); logger.err(errMsg)
process.exit(-1); process.exit(-1)
} }
function checkTestEnv() { function checkTestEnv () {
const argv = yargs(hideBin(process.argv)) const argv = yargs(hideBin(process.argv))
.usage('Usage: $0 [options] <tests>') .usage('Usage: $0 [options] <tests>')
.example('$0 --network ethermint', 'run all tests using ethermint network') .example('$0 --network ethermint', 'run all tests using ethermint network')
@ -27,150 +25,142 @@ function checkTestEnv() {
.describe('batch', 'set the test batch in parallelized testing. Format: %d-%d') .describe('batch', 'set the test batch in parallelized testing. Format: %d-%d')
.describe('allowTests', 'only run specified tests. Separated by comma.') .describe('allowTests', 'only run specified tests. Separated by comma.')
.boolean('verbose-log').describe('verbose-log', 'print laconicd output, default false') .boolean('verbose-log').describe('verbose-log', 'print laconicd output, default false')
.argv; .argv
if (!fs.existsSync(path.join(__dirname, './node_modules'))) { if (!fs.existsSync(path.join(__dirname, './node_modules'))) {
panic('node_modules does not exist. Please run `yarn install` before running tests.'); panic('node_modules does not exist. Please run `yarn install` before running tests.')
} }
const runConfig = {}; const runConfig = {}
// Check test network // Check test network
if (!argv.network) { if (!argv.network) {
runConfig.network = 'ganache'; runConfig.network = 'ganache'
} } else {
else {
if (argv.network !== 'ethermint' && argv.network !== 'ganache') { if (argv.network !== 'ethermint' && argv.network !== 'ganache') {
panic('network is invalid. Must be ganache or ethermint'); panic('network is invalid. Must be ganache or ethermint')
} } else {
else { runConfig.network = argv.network
runConfig.network = argv.network;
} }
} }
if (argv.batch) { if (argv.batch) {
const [toRunBatch, allBatches] = argv.batch.split('-').map(e => Number(e))
const [toRunBatch, allBatches] = argv.batch.split('-').map(e => Number(e)); console.log([toRunBatch, allBatches])
console.log([toRunBatch, allBatches]);
if (!toRunBatch || !allBatches) { if (!toRunBatch || !allBatches) {
panic('bad batch input format'); panic('bad batch input format')
} }
if (toRunBatch > allBatches) { if (toRunBatch > allBatches) {
panic('test batch number is larger than batch counts'); panic('test batch number is larger than batch counts')
} }
if (toRunBatch <= 0 || allBatches <=0 ) { if (toRunBatch <= 0 || allBatches <= 0) {
panic('test batch number or batch counts must be non-zero values'); panic('test batch number or batch counts must be non-zero values')
} }
runConfig.batch = {}; runConfig.batch = {}
runConfig.batch.this = toRunBatch; runConfig.batch.this = toRunBatch
runConfig.batch.all = allBatches; runConfig.batch.all = allBatches
} }
// only test // only test
runConfig.onlyTest = !!argv['allowTests'] ? argv['allowTests'].split(',') : undefined; runConfig.onlyTest = argv.allowTests ? argv.allowTests.split(',') : undefined
runConfig.verboseLog = !!argv['verbose-log']; runConfig.verboseLog = !!argv['verbose-log']
logger.info(`Running on network: ${runConfig.network}`);
return runConfig;
logger.info(`Running on network: ${runConfig.network}`)
return runConfig
} }
function loadTests(runConfig) { function loadTests (runConfig) {
let validTests = []; let validTests = []
fs.readdirSync(path.join(__dirname, 'suites')).forEach(dirname => { fs.readdirSync(path.join(__dirname, 'suites')).forEach(dirname => {
const dirStat = fs.statSync(path.join(__dirname, 'suites', dirname)); const dirStat = fs.statSync(path.join(__dirname, 'suites', dirname))
if (!dirStat.isDirectory()) { if (!dirStat.isDirectory()) {
logger.warn(`${dirname} is not a directory. Skip this test suite.`); logger.warn(`${dirname} is not a directory. Skip this test suite.`)
return; return
} }
const needFiles = ['package.json', 'test']; const needFiles = ['package.json', 'test']
for (const f of needFiles) { for (const f of needFiles) {
if (!fs.existsSync(path.join(__dirname, 'suites', dirname, f))) { if (!fs.existsSync(path.join(__dirname, 'suites', dirname, f))) {
logger.warn(`${dirname} does not contain file/dir: ${f}. Skip this test suite.`); logger.warn(`${dirname} does not contain file/dir: ${f}. Skip this test suite.`)
return; return
} }
} }
// test package.json // test package.json
try { try {
const testManifest = JSON.parse(fs.readFileSync(path.join(__dirname, 'suites', dirname, 'package.json'), 'utf-8')) const testManifest = JSON.parse(fs.readFileSync(path.join(__dirname, 'suites', dirname, 'package.json'), 'utf-8'))
const needScripts = ['test-ganache', 'test-ethermint']; const needScripts = ['test-ganache', 'test-ethermint']
for (const s of needScripts) { for (const s of needScripts) {
if (Object.keys(testManifest['scripts']).indexOf(s) === -1) { if (Object.keys(testManifest.scripts).indexOf(s) === -1) {
logger.warn(`${dirname} does not have test script: \`${s}\`. Skip this test suite.`); logger.warn(`${dirname} does not have test script: \`${s}\`. Skip this test suite.`)
return; return
} }
} }
} catch (error) { } catch (error) {
logger.warn(`${dirname} test package.json load failed. Skip this test suite.`); logger.warn(`${dirname} test package.json load failed. Skip this test suite.`)
logger.err(error); logger.err(error)
return; return
} }
validTests.push(dirname); validTests.push(dirname)
}) })
if (runConfig.onlyTest) { if (runConfig.onlyTest) {
validTests = validTests.filter(t => runConfig.onlyTest.indexOf(t) !== -1); validTests = validTests.filter(t => runConfig.onlyTest.indexOf(t) !== -1)
} }
if (runConfig.batch) { if (runConfig.batch) {
const chunkSize = Math.ceil(validTests.length / runConfig.batch.all); const chunkSize = Math.ceil(validTests.length / runConfig.batch.all)
const toRunTests = validTests.slice( const toRunTests = validTests.slice(
(runConfig.batch.this - 1) * chunkSize, (runConfig.batch.this - 1) * chunkSize,
runConfig.batch.this === runConfig.batch.all ? undefined : runConfig.batch.this * chunkSize runConfig.batch.this === runConfig.batch.all ? undefined : runConfig.batch.this * chunkSize
); )
return toRunTests; return toRunTests
} } else {
else { return validTests
return validTests;
} }
} }
function performTestSuite({ testName, network }) { function performTestSuite ({ testName, network }) {
const cmd = network === 'ganache' ? 'test-ganache' : 'test-ethermint'; const cmd = network === 'ganache' ? 'test-ganache' : 'test-ethermint'
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const testProc = spawn('yarn', [cmd], { const testProc = spawn('yarn', [cmd], {
cwd: path.join(__dirname, 'suites', testName) cwd: path.join(__dirname, 'suites', testName)
}); })
testProc.stdout.pipe(process.stdout); testProc.stdout.pipe(process.stdout)
testProc.stderr.pipe(process.stderr); testProc.stderr.pipe(process.stderr)
testProc.on('close', code => { testProc.on('close', code => {
if (code === 0) { if (code === 0) {
console.log("end"); console.log('end')
resolve(); resolve()
} else {
reject(new Error(`Test: ${testName} exited with error code ${code}`))
} }
else { })
reject(new Error(`Test: ${testName} exited with error code ${code}`)); })
}
});
});
} }
async function performTests({ allTests, runConfig }) { async function performTests ({ allTests, runConfig }) {
if (allTests.length === 0) { if (allTests.length === 0) {
panic('No tests are found or all invalid!'); panic('No tests are found or all invalid!')
} }
for (const currentTestName of allTests) { for (const currentTestName of allTests) {
logger.info(`Start test: ${currentTestName}`); logger.info(`Start test: ${currentTestName}`)
await performTestSuite({ testName: currentTestName, network: runConfig.network }); await performTestSuite({ testName: currentTestName, network: runConfig.network })
} }
logger.info(`${allTests.length} test suites passed!`); logger.info(`${allTests.length} test suites passed!`)
} }
function setupNetwork({ runConfig, timeout }) { function setupNetwork ({ runConfig, timeout }) {
if (runConfig.network !== 'ethermint') { if (runConfig.network !== 'ethermint') {
// no need to start ganache. Truffle will start it // no need to start ganache. Truffle will start it
return; return
} }
// Spawn the ethermint process // Spawn the ethermint process
@ -178,52 +168,51 @@ function setupNetwork({ runConfig, timeout }) {
const spawnPromise = new Promise((resolve, reject) => { const spawnPromise = new Promise((resolve, reject) => {
const laconicdProc = spawn('./init-test-node.sh', { const laconicdProc = spawn('./init-test-node.sh', {
cwd: __dirname, cwd: __dirname,
stdio: ['ignore', runConfig.verboseLog ? 'pipe' : 'ignore', 'pipe'], stdio: ['ignore', runConfig.verboseLog ? 'pipe' : 'ignore', 'pipe']
}); })
logger.info(`Starting laconicd process... timeout: ${timeout}ms`); logger.info(`Starting laconicd process... timeout: ${timeout}ms`)
if (runConfig.verboseLog) { if (runConfig.verboseLog) {
laconicdProc.stdout.pipe(process.stdout); laconicdProc.stdout.pipe(process.stdout)
} }
laconicdProc.stderr.on('data', d => { laconicdProc.stderr.on('data', d => {
const oLine = d.toString(); const oLine = d.toString()
if (runConfig.verboseLog) { if (runConfig.verboseLog) {
process.stdout.write(oLine); process.stdout.write(oLine)
} }
if (oLine.indexOf('Starting JSON-RPC server') !== -1) { if (oLine.indexOf('Starting JSON-RPC server') !== -1) {
logger.info('laconicd started'); logger.info('laconicd started')
resolve(laconicdProc); resolve(laconicdProc)
} }
}); })
}); })
const timeoutPromise = new Promise((resolve, reject) => { const timeoutPromise = new Promise((resolve, reject) => {
setTimeout(() => reject(new Error('Start laconicd timeout!')), timeout); setTimeout(() => reject(new Error('Start laconicd timeout!')), timeout)
}); })
return Promise.race([spawnPromise, timeoutPromise]); return Promise.race([spawnPromise, timeoutPromise])
} }
async function main() { async function main () {
const runConfig = checkTestEnv()
const allTests = loadTests(runConfig)
const runConfig = checkTestEnv(); console.log(`Running Tests: ${allTests.join()}`)
const allTests = loadTests(runConfig);
console.log(`Running Tests: ${allTests.join()}`); const proc = await setupNetwork({ runConfig, timeout: 50000 })
await performTests({ allTests, runConfig })
const proc = await setupNetwork({ runConfig, timeout: 50000 });
await performTests({ allTests, runConfig });
if (proc) { if (proc) {
proc.kill(); proc.kill()
} }
process.exit(0); process.exit(0)
} }
// Add handler to exit the program when UnhandledPromiseRejection // Add handler to exit the program when UnhandledPromiseRejection
process.on('unhandledRejection', e => { process.on('unhandledRejection', e => {
console.error(e); console.error(e)
process.exit(-1); process.exit(-1)
}); })
main(); main()
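A hedged usage sketch of the runner above (the script filename is an assumption; the flag names come from the yargs setup it declares):

// node run-tests.js --network ethermint --verbose-log
// node run-tests.js --network ganache --batch 1-2 --allowTests basic,storage

// worked example of the batch slicing in loadTests(), for illustration only
const validTests = ['basic', 'events', 'revert', 'staking'] // hypothetical suite names
const batch = { this: 1, all: 2 } // i.e. --batch 1-2
const chunkSize = Math.ceil(validTests.length / batch.all) // 2
const toRun = validTests.slice(
  (batch.this - 1) * chunkSize,
  batch.this === batch.all ? undefined : batch.this * chunkSize
)
console.log(toRun) // [ 'basic', 'events' ]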

View File

@ -1,4 +1,4 @@
import { ethers } from "ethers"; import { ethers } from 'ethers'
// connects to localhost:8545 // connects to localhost:8545
const provider = new ethers.providers.JsonRpcProvider() const provider = new ethers.providers.JsonRpcProvider()