Merge pull request #12376 from ethereum/develop

Merge `develop` into `breaking`
This commit is contained in:
chriseth 2021-12-13 12:59:33 +01:00 committed by GitHub
commit 0bbf58ec5e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
406 changed files with 2955 additions and 1992 deletions

View File

@ -20,7 +20,7 @@ cd build
$boost_dir=(Resolve-Path $PSScriptRoot\..\deps\boost\lib\cmake\Boost-*)
..\deps\cmake\bin\cmake -G "Visual Studio 16 2019" -DBoost_DIR="$boost_dir\" -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded -DCMAKE_INSTALL_PREFIX="$PSScriptRoot\..\upload" -DUSE_Z3=OFF ..
if ( -not $? ) { throw "CMake configure failed." }
msbuild solidity.sln /p:Configuration=Release /m:5 /v:minimal
msbuild solidity.sln /p:Configuration=Release /m:10 /v:minimal
if ( -not $? ) { throw "Build failed." }
..\deps\cmake\bin\cmake --build . -j 5 --target install --config Release
..\deps\cmake\bin\cmake --build . -j 10 --target install --config Release
if ( -not $? ) { throw "Install target failed." }

View File

@ -9,20 +9,20 @@ version: 2.1
parameters:
ubuntu-2004-docker-image:
type: string
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2004-8
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:9c3cdfc1d573d1ca3edacd892590a9a83487a1f746a6ca2093d7e009818c5179"
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2004-9
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:3d8a912e8e78e98cd217955d06d98608ad60adc67728d4c3a569991235fa1abb"
ubuntu-2004-clang-docker-image:
type: string
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2004.clang-8
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:61232feea23c8c57e82cf5fae890f8b86bbec353cdc04f2fcba383ca589e1d8b"
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2004.clang-9
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:a1ba002cae17279d1396a898b04e4e9c45602ad881295db3e2f484a7e24f6f43"
ubuntu-1604-clang-ossfuzz-docker-image:
type: string
# solbuildpackpusher/solidity-buildpack-deps:ubuntu1604.clang.ossfuzz-13
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:c26a7ffc9fc243a4ec3105b9dc1edcdd964ad0e9665c83172b7ebda74bbf3021"
# solbuildpackpusher/solidity-buildpack-deps:ubuntu1604.clang.ossfuzz-14
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:f353823cce2f6cd2f9f1459d86cd76fdfc551a0261d87626615ea6c1d8f90587"
emscripten-docker-image:
type: string
# solbuildpackpusher/solidity-buildpack-deps:emscripten-7
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:9ffcd0944433fe100e9433f2aa9ba5c21e096e758ad8a05a4a76feaed3d1f463"
# solbuildpackpusher/solidity-buildpack-deps:emscripten-8
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:842d6074e0e7e5355c89122c1cafc1fdb59696596750e7d56e5f35c0d883ad59"
evm-version:
type: string
default: london
@ -44,10 +44,17 @@ commands:
name: "Gitter notification"
when: << parameters.condition >>
command: |
[[ $CI_PULL_REQUEST == "" ]] || { echo "Running on a PR - notification skipped."; exit 0; }
# FIXME: Checking $CIRCLE_PULL_REQUEST would be better than hard-coding branch names
# but it's broken. CircleCI associates runs on develop/breaking with random old PRs.
[[ $CIRCLE_BRANCH == develop || $CIRCLE_BRANCH == breaking ]] || { echo "Running on a PR or a feature branch - notification skipped."; exit 0; }
[[ "<< parameters.event >>" == "failure" ]] && message=" ❌ Nightly job **${CIRCLE_JOB}** failed on **${CIRCLE_BRANCH}**. Please see [build #${CIRCLE_BUILD_NUM}](${CIRCLE_BUILD_URL}) for details."
[[ "<< parameters.event >>" == "success" ]] && message=" ✅ Nightly job **${CIRCLE_JOB}** succeeded on **${CIRCLE_BRANCH}**. Please see [build #${CIRCLE_BUILD_NUM}](${CIRCLE_BUILD_URL}) for details."
# Workflow name is not exposed as an env variable. Has to be queried from the API.
# The name is not critical so if anything fails, use the raw workflow ID as a fallback.
workflow_info=$(curl --silent "https://circleci.com/api/v2/workflow/${CIRCLE_WORKFLOW_ID}") || true
workflow_name=$(echo "$workflow_info" | grep -E '"\s*name"\s*:\s*".*"' | cut -d \" -f 4 || echo "$CIRCLE_WORKFLOW_ID")
[[ "<< parameters.event >>" == "failure" ]] && message=" ❌ [${workflow_name}] Job **${CIRCLE_JOB}** failed on **${CIRCLE_BRANCH}**. Please see [build ${CIRCLE_BUILD_NUM}](${CIRCLE_BUILD_URL}) for details."
[[ "<< parameters.event >>" == "success" ]] && message=" ✅ [${workflow_name}] Job **${CIRCLE_JOB}** succeeded on **${CIRCLE_BRANCH}**. Please see [build ${CIRCLE_BUILD_NUM}](${CIRCLE_BUILD_URL}) for details."
curl "https://api.gitter.im/v1/rooms/${GITTER_NOTIFY_ROOM_ID}/chatMessages" \
--request POST \
@ -219,6 +226,14 @@ defaults:
- image: << pipeline.parameters.ubuntu-1604-clang-ossfuzz-docker-image >>
environment:
TERM: xterm
MAKEFLAGS: -j 3
- base_ubuntu1604_clang_small: &base_ubuntu1604_clang_small
<<: *base_ubuntu1604_clang
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
- base_ubuntu2004_clang: &base_ubuntu2004_clang
docker:
@ -229,20 +244,44 @@ defaults:
CXX: clang++
MAKEFLAGS: -j 3
- base_ubuntu2004_clang_xlarge: &base_ubuntu2004_clang_xlarge
- base_ubuntu2004_clang_small: &base_ubuntu2004_clang_small
<<: *base_ubuntu2004_clang
resource_class: xlarge
resource_class: small
environment:
TERM: xterm
CC: clang
CXX: clang++
MAKEFLAGS: -j 10
MAKEFLAGS: -j 2
- base_ubuntu2004_clang_large: &base_ubuntu2004_clang_large
<<: *base_ubuntu2004_clang
resource_class: large
environment:
TERM: xterm
CC: clang
CXX: clang++
MAKEFLAGS: -j 5
- base_ubuntu2004: &base_ubuntu2004
docker:
- image: << pipeline.parameters.ubuntu-2004-docker-image >>
environment:
TERM: xterm
MAKEFLAGS: -j 3
- base_ubuntu2004_small: &base_ubuntu2004_small
<<: *base_ubuntu2004
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
- base_ubuntu2004_large: &base_ubuntu2004_large
<<: *base_ubuntu2004
resource_class: large
environment:
TERM: xterm
MAKEFLAGS: -j 5
- base_ubuntu2004_xlarge: &base_ubuntu2004_xlarge
<<: *base_ubuntu2004
@ -251,29 +290,48 @@ defaults:
TERM: xterm
MAKEFLAGS: -j 10
- base_buildpack_focal: &base_buildpack_focal
- base_buildpack_focal_small: &base_buildpack_focal_small
docker:
- image: buildpack-deps:focal
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
- base_buildpack_latest: &base_buildpack_latest
- base_buildpack_latest_small: &base_buildpack_latest_small
docker:
- image: buildpack-deps:latest
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
- base_archlinux: &base_archlinux
docker:
- image: archlinux:base
environment:
TERM: xterm
MAKEFLAGS: -j 3
- base_archlinux_large: &base_archlinux_large
docker:
- image: archlinux:base
resource_class: large
environment:
TERM: xterm
MAKEFLAGS: -j 5
- base_win_powershell: &base_win_powershell
executor:
name: win/default
shell: powershell.exe
- base_win_powershell_large: &base_win_powershell_large
executor:
name: win/default
shell: powershell.exe
size: large
- base_win_cmd: &base_win_cmd
executor:
name: win/default
@ -284,26 +342,31 @@ defaults:
xcode: "11.0.0"
environment:
TERM: xterm
MAKEFLAGS: -j 5
- base_ems_xlarge: &base_ems_xlarge
- base_ems_large: &base_ems_large
docker:
- image: << pipeline.parameters.emscripten-docker-image >>
resource_class: xlarge
resource_class: large
environment:
TERM: xterm
MAKEFLAGS: -j 10
MAKEFLAGS: -j 5
- base_python: &base_python
- base_python_small: &base_python_small
docker:
- image: circleci/python:3.6
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
- base_node_latest: &base_node_latest
- base_node_latest_small: &base_node_latest_small
docker:
- image: circleci/node
resource_class: small
environment:
TERM: xterm
MAKEFLAGS: -j 2
# --------------------------------------------------------------------------
# Workflow Templates
@ -333,6 +396,11 @@ defaults:
requires:
- b_ubu_release
- workflow_ubuntu2004_static: &workflow_ubuntu2004_static
<<: *workflow_trigger_on_tags
requires:
- b_ubu_static
- workflow_archlinux: &workflow_archlinux
<<: *workflow_trigger_on_tags
requires:
@ -387,7 +455,7 @@ defaults:
jobs:
chk_spelling:
<<: *base_python
<<: *base_python_small
steps:
- checkout
- attach_workspace:
@ -402,7 +470,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_docs_examples:
<<: *base_node_latest
<<: *base_node_latest_small
steps:
- checkout
- attach_workspace:
@ -416,7 +484,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_coding_style:
<<: *base_buildpack_focal
<<: *base_buildpack_focal_small
steps:
- checkout
- run:
@ -434,7 +502,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_errorcodes:
<<: *base_python
<<: *base_python_small
steps:
- checkout
- run:
@ -443,7 +511,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_pylint:
<<: *base_buildpack_focal
<<: *base_buildpack_focal_small
steps:
- checkout
- run:
@ -459,7 +527,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_antlr_grammar:
<<: *base_buildpack_focal
<<: *base_buildpack_focal_small
steps:
- checkout
- run:
@ -471,7 +539,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_buglist:
<<: *base_node_latest
<<: *base_node_latest_small
steps:
- checkout
- run:
@ -486,7 +554,7 @@ jobs:
- gitter_notify_failure_unless_pr
chk_proofs:
<<: *base_buildpack_latest
<<: *base_buildpack_latest_small
steps:
- checkout
- run:
@ -499,14 +567,14 @@ jobs:
- gitter_notify_failure_unless_pr
chk_docs_pragma_min_version:
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
steps:
- checkout
- run: *run_docs_pragma_min_version
- gitter_notify_failure_unless_pr
t_ubu_pyscripts:
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
steps:
- checkout
- run:
@ -525,6 +593,8 @@ jobs:
- gitter_notify_failure_unless_pr
b_ubu: &b_ubu
# this runs 2x faster on xlarge but takes 4x more resources (compared to medium).
# Enough other jobs depend on it that it's worth it though.
<<: *base_ubuntu2004_xlarge
steps:
- checkout
@ -537,10 +607,11 @@ jobs:
# x64 ASAN build, for testing for memory related bugs
b_ubu_asan: &b_ubu_asan
<<: *base_ubuntu2004_xlarge
# Runs slightly faster on large and xlarge but we only run it nightly so efficiency matters more.
<<: *base_ubuntu2004
environment:
CMAKE_OPTIONS: -DSANITIZE=address
MAKEFLAGS: -j 10
MAKEFLAGS: -j 3
CMAKE_BUILD_TYPE: Release
steps:
- checkout
@ -550,7 +621,12 @@ jobs:
- gitter_notify_failure_unless_pr
b_ubu_clang: &b_ubu_clang
<<: *base_ubuntu2004_clang_xlarge
<<: *base_ubuntu2004_clang_large
environment:
TERM: xterm
CC: clang
CXX: clang++
MAKEFLAGS: -j 10
steps:
- checkout
- run: *run_build
@ -559,6 +635,7 @@ jobs:
- gitter_notify_failure_unless_pr
b_ubu_asan_clang: &b_ubu_asan_clang
# This runs a bit faster on large and xlarge but on nightly efficiency matters more.
<<: *base_ubuntu2004_clang
environment:
CC: clang
@ -573,6 +650,7 @@ jobs:
- gitter_notify_failure_unless_pr
b_ubu_ubsan_clang: &b_ubu_ubsan_clang
# This runs a bit faster on large and xlarge but on nightly efficiency matters more.
<<: *base_ubuntu2004_clang
environment:
CC: clang
@ -593,8 +671,10 @@ jobs:
MAKEFLAGS: -j 10
b_ubu_static:
# On large runs 2x faster than on medium. 3x on xlarge.
<<: *base_ubuntu2004_xlarge
environment:
TERM: xterm
MAKEFLAGS: -j 10
CMAKE_OPTIONS: -DCMAKE_BUILD_TYPE=Release -DUSE_Z3_DLOPEN=ON -DUSE_CVC4=OFF -DSOLC_STATIC_STDLIBS=ON
steps:
@ -604,14 +684,16 @@ jobs:
name: strip binary
command: strip build/solc/solc
- store_artifacts: *artifacts_solc
- persist_to_workspace: *artifacts_executables
- gitter_notify_failure_unless_pr
b_ubu_codecov:
<<: *base_ubuntu2004_xlarge
# Runs ~30% faster on large but we only run it nightly so efficiency matters more.
<<: *base_ubuntu2004
environment:
COVERAGE: ON
CMAKE_BUILD_TYPE: Debug
MAKEFLAGS: -j 10
MAKEFLAGS: -j 3
steps:
- checkout
- run: *run_build
@ -620,7 +702,6 @@ jobs:
t_ubu_codecov:
<<: *base_ubuntu2004
parallelism: 6
environment:
EVM: << pipeline.parameters.evm-version >>
OPTIMIZE: 1
@ -644,7 +725,7 @@ jobs:
# Builds in C++20 mode and uses debug build in order to speed up.
# Do *NOT* store any artifacts or workspace as we don't run tests on this build.
b_ubu_cxx20:
<<: *base_ubuntu2004_xlarge
<<: *base_ubuntu2004_large
environment:
CMAKE_BUILD_TYPE: Debug
CMAKE_OPTIONS: -DCMAKE_CXX_STANDARD=20 -DUSE_CVC4=OFF
@ -664,7 +745,7 @@ jobs:
- gitter_notify_failure_unless_pr
t_ubu_ossfuzz: &t_ubu_ossfuzz
<<: *base_ubuntu1604_clang
<<: *base_ubuntu1604_clang_small
steps:
- checkout
- attach_workspace:
@ -681,10 +762,10 @@ jobs:
- gitter_notify_success_unless_pr
b_archlinux:
<<: *base_archlinux
<<: *base_archlinux_large
environment:
TERM: xterm
MAKEFLAGS: -j 3
MAKEFLAGS: -j 5
steps:
- run:
name: Install build dependencies
@ -765,7 +846,10 @@ jobs:
- gitter_notify_failure_unless_pr
b_ems:
<<: *base_ems_xlarge
<<: *base_ems_large
environment:
TERM: xterm
MAKEFLAGS: -j 10
steps:
- checkout
- run:
@ -786,7 +870,7 @@ jobs:
- gitter_notify_failure_unless_pr
b_docs:
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
steps:
- checkout
- run: *setup_prerelease_commit_hash
@ -800,7 +884,7 @@ jobs:
t_ubu_soltest_all: &t_ubu_soltest_all
<<: *base_ubuntu2004
parallelism: 6
parallelism: 15 # 7 EVM versions, each with/without optimization + 1 ABIv1/@nooptions run
<<: *steps_soltest_all
t_archlinux_soltest: &t_archlinux_soltest
@ -843,14 +927,15 @@ jobs:
<<: *t_ubu_soltest_all
t_ubu_cli: &t_ubu_cli
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
<<: *steps_cmdline_tests
t_ubu_release_cli: &t_ubu_release_cli
<<: *t_ubu_cli
t_ubu_asan_cli:
<<: *base_ubuntu2004
# Runs slightly faster on medium but we only run it nightly so efficiency matters more.
<<: *base_ubuntu2004_small
environment:
TERM: xterm
ASAN_OPTIONS: check_initialization_order=true:detect_stack_use_after_return=true:strict_init_order=true:strict_string_checks=true:detect_invalid_pointer_pairs=2
@ -858,7 +943,6 @@ jobs:
t_ubu_asan_soltest:
<<: *base_ubuntu2004
parallelism: 6
environment:
EVM: << pipeline.parameters.evm-version >>
OPTIMIZE: 0
@ -882,10 +966,11 @@ jobs:
<<: *steps_soltest
t_ubu_ubsan_clang_cli:
<<: *base_ubuntu2004_clang
<<: *base_ubuntu2004_clang_small
<<: *steps_cmdline_tests
t_ems_solcjs:
# Unlike other t_ems jobs this one actually runs 2x faster on medium (compared to small).
<<: *base_ubuntu2004
steps:
- checkout
@ -906,7 +991,7 @@ jobs:
- gitter_notify_failure_unless_pr
t_ems_ext_hardhat:
<<: *base_node_latest
<<: *base_node_latest_small
environment:
TERM: xterm
HARDHAT_TESTS_SOLC_PATH: /tmp/workspace/soljson.js
@ -935,14 +1020,25 @@ jobs:
parameters:
project:
type: string
binary_type:
type: enum
enum:
- solcjs
- native
compile_only:
type: integer
default: 0
nodejs_version:
type: integer
default: 14
type: string
default: latest
resource_class:
type: string
default: small
docker:
- image: circleci/node:<<parameters.nodejs_version>>
resource_class: <<parameters.resource_class>>
# NOTE: Each external test does 3 separate compile&test runs
parallelism: 3
environment:
TERM: xterm
COMPILE_ONLY: <<parameters.compile_only>>
@ -951,18 +1047,30 @@ jobs:
- attach_workspace:
at: /tmp/workspace
- run:
name: Install dependencies
name: Install lsof
command: |
# lsof is used by Colony in its stop-blockchain-client.sh script
sudo apt-get -qy install lsof
- run:
name: External <<parameters.project>> tests
command: |
test/externalTests/<<parameters.project>>.sh /tmp/workspace/soljson.js
sudo apt-get --quiet --assume-yes --no-install-recommends install lsof
- when:
condition:
equal: [<< parameters.binary_type >>, "solcjs"]
steps:
- run:
name: External <<parameters.project>> tests (solcjs)
command: |
test/externalTests/<<parameters.project>>.sh solcjs /tmp/workspace/soljson.js
- when:
condition:
equal: [<< parameters.binary_type >>, "native"]
steps:
- run:
name: External <<parameters.project>> tests (native)
command: |
test/externalTests/<<parameters.project>>.sh native /tmp/workspace/solc/solc
- gitter_notify_failure_unless_pr
b_win: &b_win
<<: *base_win_powershell
<<: *base_win_powershell_large
steps:
# NOTE: Not disabling git's core.autocrlf here because we want to build using the typical Windows config.
- checkout
@ -1019,7 +1127,7 @@ jobs:
<<: *t_win_soltest
b_bytecode_ubu:
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
steps:
- checkout
- attach_workspace:
@ -1085,7 +1193,7 @@ jobs:
- gitter_notify_failure_unless_pr
b_bytecode_ems:
<<: *base_node_latest
<<: *base_node_latest_small
environment:
SOLC_EMSCRIPTEN: "On"
steps:
@ -1102,7 +1210,7 @@ jobs:
- gitter_notify_failure_unless_pr
t_bytecode_compare:
<<: *base_ubuntu2004
<<: *base_ubuntu2004_small
environment:
REPORT_FILES: |
bytecode-report-emscripten.txt
@ -1191,58 +1299,53 @@ workflows:
- t_ems_solcjs: *workflow_emscripten
- t_ems_ext_hardhat: *workflow_emscripten
# Separate compile-only runs of those external tests where a full run takes much longer.
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_compile_ext_colony
project: colony
binary_type: solcjs
compile_only: 1
nodejs_version: '14'
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_compile_ext_gnosis
<<: *workflow_ubuntu2004_static
name: t_native_compile_ext_gnosis
project: gnosis
binary_type: native
compile_only: 1
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_compile_ext_gnosis_v2
project: gnosis-v2
compile_only: 1
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_compile_ext_zeppelin
project: zeppelin
compile_only: 1
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_compile_ext_ens
project: ens
compile_only: 1
# NOTE: One of the dependencies (fsevents) fails to build its native extension on node.js 12+.
nodejs_version: 10
nodejs_version: '14'
# FIXME: Gnosis tests are pretty flaky right now. They often fail on CircleCI due to random ProviderError
# and there are also other less frequent problems. See https://github.com/gnosis/safe-contracts/issues/216.
#- t_ems_ext:
# <<: *workflow_emscripten
# name: t_ems_test_ext_gnosis
# name: t_native_test_ext_gnosis
# project: gnosis
# binary_type: native
# # NOTE: Tests do not start on node.js 14 ("ganache-cli exited early with code 1").
# nodejs_version: 12
# nodejs_version: '12'
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_test_ext_gnosis_v2
<<: *workflow_ubuntu2004_static
name: t_native_test_ext_gnosis_v2
project: gnosis-v2
binary_type: native
# NOTE: Tests do not start on node.js 14 ("ganache-cli exited early with code 1").
nodejs_version: 12
nodejs_version: '12'
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_test_ext_zeppelin
<<: *workflow_ubuntu2004_static
name: t_native_test_ext_zeppelin
project: zeppelin
binary_type: native
# NOTE: Tests crash on nodejs 17: "Error: error:0308010C:digital envelope routines::unsupported"
nodejs_version: '16'
resource_class: large
- t_ems_ext:
<<: *workflow_emscripten
name: t_ems_test_ext_ens
<<: *workflow_ubuntu2004_static
name: t_native_test_ext_ens
project: ens
binary_type: native
# NOTE: One of the dependencies (fsevents) fails to build its native extension on node.js 12+.
nodejs_version: 10
nodejs_version: '10'
# Windows build and tests
- b_win: *workflow_trigger_on_tags
@ -1307,3 +1410,6 @@ workflows:
<<: *workflow_emscripten
name: t_ems_test_ext_colony
project: colony
binary_type: solcjs
nodejs_version: '14'
resource_class: medium

View File

@ -57,14 +57,15 @@ then
brew install cmake
brew install wget
brew install coreutils
brew install diffutils
./scripts/install_obsolete_jsoncpp_1_7_4.sh
# z3
z3_version="4.8.12"
z3_dir="z3-${z3_version}-x64-osx-10.15.7"
z3_version="4.8.13"
z3_dir="z3-${z3_version}-x64-osx-10.16"
z3_package="${z3_dir}.zip"
wget "https://github.com/Z3Prover/z3/releases/download/z3-${z3_version}/${z3_package}"
validate_checksum "$z3_package" a1f6ef3c99456147c4d3f2652dc6bc90951c4ab3fe7741a255eb794f0ab8938c
validate_checksum "$z3_package" 191b26be2b617b2dffffce139d77abcd7e584859efbc10a58d01a1d7830697a4
unzip "$z3_package"
rm "$z3_package"
cp "${z3_dir}/bin/libz3.a" /usr/local/lib

View File

@ -28,27 +28,32 @@ set -e
REPODIR="$(realpath "$(dirname "$0")"/..)"
# shellcheck source=scripts/common.sh
source "${REPODIR}/scripts/common.sh"
# NOTE: If you add/remove values, remember to update `parallelism` setting in CircleCI config.
EVM_VALUES=(homestead byzantium constantinople petersburg istanbul berlin london)
DEFAULT_EVM=london
[[ " ${EVM_VALUES[*]} " =~ $DEFAULT_EVM ]]
OPTIMIZE_VALUES=(0 1)
STEPS=$(( 1 + ${#EVM_VALUES[@]} * ${#OPTIMIZE_VALUES[@]} ))
if (( CIRCLE_NODE_TOTAL )) && (( CIRCLE_NODE_TOTAL > 1 ))
then
RUN_STEPS=$(seq "$STEPS" | circleci tests split | xargs)
else
RUN_STEPS=$(seq "$STEPS" | xargs)
fi
echo "Running steps $RUN_STEPS..."
RUN_STEPS=$(circleci_select_steps "$(seq "$STEPS")")
printTask "Running steps $RUN_STEPS..."
STEP=1
# Run for ABI encoder v1, without SMTChecker tests.
[[ " $RUN_STEPS " == *" $STEP "* ]] && EVM="${DEFAULT_EVM}" OPTIMIZE=1 ABI_ENCODER_V1=1 BOOST_TEST_ARGS="-t !smtCheckerTests" "${REPODIR}/.circleci/soltest.sh"
STEP=$((STEP + 1))
if circleci_step_selected "$RUN_STEPS" "$STEP"
then
EVM="${DEFAULT_EVM}" \
OPTIMIZE=1 \
ABI_ENCODER_V1=1 \
BOOST_TEST_ARGS="-t !smtCheckerTests" \
"${REPODIR}/.circleci/soltest.sh"
fi
((++STEP))
for OPTIMIZE in "${OPTIMIZE_VALUES[@]}"
do
@ -63,13 +68,16 @@ do
DISABLE_SMTCHECKER=""
[ "${OPTIMIZE}" != "0" ] && DISABLE_SMTCHECKER="-t !smtCheckerTests"
[[ " $RUN_STEPS " == *" $STEP "* ]] && EVM="$EVM" OPTIMIZE="$OPTIMIZE" SOLTEST_FLAGS="$SOLTEST_FLAGS $ENFORCE_GAS_ARGS $EWASM_ARGS" BOOST_TEST_ARGS="-t !@nooptions $DISABLE_SMTCHECKER" "${REPODIR}/.circleci/soltest.sh"
STEP=$((STEP + 1))
if circleci_step_selected "$RUN_STEPS" "$STEP"
then
EVM="$EVM" \
OPTIMIZE="$OPTIMIZE" \
SOLTEST_FLAGS="$SOLTEST_FLAGS $ENFORCE_GAS_ARGS $EWASM_ARGS" \
BOOST_TEST_ARGS="-t !@nooptions $DISABLE_SMTCHECKER" \
"${REPODIR}/.circleci/soltest.sh"
fi
((++STEP))
done
done
if ((STEP != STEPS + 1))
then
echo "Step counter not properly adjusted!" >&2
exit 1
fi
((STEP == STEPS + 1)) || assertFail "Step counter not properly adjusted!"

View File

@ -65,7 +65,7 @@ configure_file("${CMAKE_SOURCE_DIR}/cmake/templates/license.h.in" include/licens
include(EthOptions)
configure_project(TESTS)
set(LATEST_Z3_VERSION "4.8.12")
set(LATEST_Z3_VERSION "4.8.13")
set(MINIMUM_Z3_VERSION "4.8.0")
find_package(Z3)
if (${Z3_FOUND})

View File

@ -16,6 +16,10 @@ Compiler Features:
Bugfixes:
* Code Generator: Fix a crash when using ``@use-src`` and compiling from Yul to ewasm.
* SMTChecker: Fix internal error when an unsafe target is solved more than once and the counterexample messages are different.
* Fix internal error when a function has a calldata struct argument with an internal type inside.
### 0.8.10 (2021-11-09)
@ -111,7 +115,7 @@ Bugfixes:
* SMTChecker: Fix false positive in external calls from constructors.
* SMTChecker: Fix internal error on some multi-source uses of ``abi.*``, cryptographic functions and constants.
* Standard JSON: Fix non-fatal errors in Yul mode being discarded if followed by a fatal error.
* Type Checker: Correct wrong error message in inline assembly complaining about ``.slot`` or ``.offset` not valid when actually ``.length`` was used.
* Type Checker: Correct wrong error message in inline assembly complaining about ``.slot`` or ``.offset`` not valid when actually ``.length`` was used.
* Type Checker: Disallow modifier declarations and definitions in interfaces.
* Yul Optimizer: Fix a crash in LoadResolver, when ``keccak256`` has particular non-identifier arguments.

View File

@ -13,13 +13,13 @@
</span>
<div class="rst-other-versions">
<dl>
<dt>{{ _('Versions') }}</dt> {% for slug, url in versions %}
<dd><a href="{{ url }}">{{ slug }}</a></dd>
<dt>{{ _('Downloads') }}</dt> {% for type, url in downloads %}
<dd><a href="{{ url }}">{{ type }}</a></dd>
{% endfor %}
</dl>
<dl>
<dt>{{ _('Downloads') }}</dt> {% for type, url in downloads %}
<dd><a href="{{ url }}">{{ type }}</a></dd>
<dt>{{ _('Versions') }}</dt> {% for slug, url in versions %}
<dd><a href="{{ url }}">{{ slug }}</a></dd>
{% endfor %}
</dl>
<dl>

View File

@ -749,7 +749,7 @@ Non-standard Packed Mode
Through ``abi.encodePacked()``, Solidity supports a non-standard packed mode where:
- types shorter than 32 bytes are neither zero padded nor sign extended and
- types shorter than 32 bytes are concatenated directly, without padding or sign extension
- dynamic types are encoded in-place and without the length.
- array elements are padded, but still encoded in-place

View File

@ -59,7 +59,7 @@ Under the following terms:
- **Attribution** — You must give appropriate credit, provide a link to
the license, and indicate if changes were made. You may do so in any
reasonable manner, but not in any way that suggests the the Solidity
reasonable manner, but not in any way that suggests that the Solidity
core team endorses you or your use.
When using the Solidity logo, please respect the Solidity logo guidelines.

View File

@ -81,7 +81,7 @@ Global Variables
- ``abi.encodeWithSelector(bytes4 selector, ...) returns (bytes memory)``: :ref:`ABI <ABI>`-encodes
the given arguments starting from the second and prepends the given four-byte selector
- ``abi.encodeWithSignature(string memory signature, ...) returns (bytes memory)``: Equivalent
to ``abi.encodeWithSelector(bytes4(keccak256(bytes(signature)), ...)```
to ``abi.encodeWithSelector(bytes4(keccak256(bytes(signature)), ...)``
- ``bytes.concat(...) returns (bytes memory)``: :ref:`Concatenates variable number of
arguments to one byte array<bytes-concat>`
- ``block.basefee`` (``uint``): current block's base fee (`EIP-3198 <https://eips.ethereum.org/EIPS/eip-3198>`_ and `EIP-1559 <https://eips.ethereum.org/EIPS/eip-1559>`_)

View File

@ -31,6 +31,12 @@ a 0.y.z version number `to indicate this fast pace of change <https://semver.org
Ideas for improving Solidity or this documentation are always welcome,
read our :doc:`contributors guide <contributing>` for more details.
.. Hint::
You can download this documentation as PDF, HTML or Epub by clicking on the versions
flyout menu in the bottom-left corner and selecting the preferred download format.
Getting Started
---------------

View File

@ -521,7 +521,7 @@ ExpressionSplitter
The expression splitter turns expressions like ``add(mload(0x123), mul(mload(0x456), 0x20))``
into a sequence of declarations of unique variables that are assigned sub-expressions
of that expression so that each function call has only variables or literals
of that expression so that each function call has only variables
as arguments.
The above would be transformed into
@ -529,10 +529,13 @@ The above would be transformed into
.. code-block:: yul
{
let _1 := mload(0x123)
let _2 := mul(_1, 0x20)
let _3 := mload(0x456)
let z := add(_3, _2)
let _1 := 0x20
let _2 := 0x456
let _3 := mload(_2)
let _4 := mul(_3, _1)
let _5 := 0x123
let _6 := mload(_5)
let z := add(_6, _4)
}
Note that this transformation does not change the order of opcodes or function calls.
@ -543,7 +546,7 @@ this "outlining" of the inner expressions in all cases. We can sidestep this lim
The final program should be in a form such that (with the exception of loop conditions)
function calls cannot appear nested inside expressions
and all function call arguments have to be literals or variables.
and all function call arguments have to be variables.
The benefits of this form are that it is much easier to re-order the sequence of opcodes
and it is also easier to perform function call inlining. Furthermore, it is simpler
@ -972,15 +975,19 @@ BlockFlattener
^^^^^^^^^^^^^^
This stage eliminates nested blocks by inserting the statement in the
inner block at the appropriate place in the outer block:
inner block at the appropriate place in the outer block. It depends on the
FunctionGrouper and does not flatten the outermost block to keep the form
produced by the FunctionGrouper.
.. code-block:: yul
{
let x := 2
{
let y := 3
mstore(x, y)
let x := 2
{
let y := 3
mstore(x, y)
}
}
}
@ -989,9 +996,11 @@ is transformed to
.. code-block:: yul
{
let x := 2
let y := 3
mstore(x, y)
{
let x := 2
let y := 3
mstore(x, y)
}
}
As long as the code is disambiguated, this does not cause a problem because

View File

@ -313,7 +313,7 @@ likely it will be.
since it is not up to the submitter of a transaction, but up to the miners to determine in which block the transaction is included.
If you want to schedule future calls of your contract, you can use
the `alarm clock <https://www.ethereum-alarm-clock.com/>`_ or a similar oracle service.
a smart contract automation tool or an oracle service.
.. _the-ethereum-virtual-machine:
@ -584,5 +584,5 @@ but instead is implemented in the EVM execution environment itself.
Different EVM-compatible chains might use a different set of
precompiled contracts. It might also be possible that new
precompiled contracts are added to the Ethereum main chain in the future,
but you can reasonabyly expect them to always be in the range between
but you can reasonably expect them to always be in the range between
``1`` and ``0xffff`` (inclusive).

View File

@ -747,7 +747,7 @@ Yes:
function thisFunctionNameIsReallyLong(
address x,
address y,
address z,
address z
)
public
onlyOwner

View File

@ -437,14 +437,16 @@ an error. You can prepend (for integer types) or append (for bytesNN types) zero
Rational and Integer Literals
-----------------------------
Integer literals are formed from a sequence of numbers in the range 0-9.
Integer literals are formed from a sequence of digits in the range 0-9.
They are interpreted as decimals. For example, ``69`` means sixty nine.
Octal literals do not exist in Solidity and leading zeros are invalid.
Decimal fraction literals are formed by a ``.`` with at least one number on
Decimal fractional literals are formed by a ``.`` with at least one number on
one side. Examples include ``1.``, ``.1`` and ``1.3``.
Scientific notation is also supported, where the base can have fractions and the exponent cannot.
Scientific notation in the form of ``2e10`` is also supported, where the
mantissa can be fractional but the exponent has to be an integer.
The literal ``MeE`` is equivalent to ``M * 10**E``.
Examples include ``2e10``, ``-2e10``, ``2e-10``, ``2.5e1``.
Underscores can be used to separate the digits of a numeric literal to aid readability.
@ -507,7 +509,7 @@ String literals are written with either double or single-quotes (``"foo"`` or ``
For example, with ``bytes32 samevar = "stringliteral"`` the string literal is interpreted in its raw byte form when assigned to a ``bytes32`` type.
String literals can only contain printable ASCII characters, which means the characters between and including 0x1F .. 0x7E.
String literals can only contain printable ASCII characters, which means the characters between and including 0x20 .. 0x7E.
Additionally, string literals also support the following escape characters:

View File

@ -135,7 +135,7 @@ ABI Encoding and Decoding Functions
- ``abi.encode(...) returns (bytes memory)``: ABI-encodes the given arguments
- ``abi.encodePacked(...) returns (bytes memory)``: Performs :ref:`packed encoding <abi_packed_mode>` of the given arguments. Note that packed encoding can be ambiguous!
- ``abi.encodeWithSelector(bytes4 selector, ...) returns (bytes memory)``: ABI-encodes the given arguments starting from the second and prepends the given four-byte selector
- ``abi.encodeWithSignature(string memory signature, ...) returns (bytes memory)``: Equivalent to ``abi.encodeWithSelector(bytes4(keccak256(bytes(signature))), ...)```
- ``abi.encodeWithSignature(string memory signature, ...) returns (bytes memory)``: Equivalent to ``abi.encodeWithSelector(bytes4(keccak256(bytes(signature))), ...)``
.. note::
These encoding functions can be used to craft data for external function calls without actually

View File

@ -166,7 +166,7 @@ Inside a code block, the following elements can be used
- if statements, e.g. ``if lt(a, b) { sstore(0, 1) }``
- switch statements, e.g. ``switch mload(0) case 0 { revert() } default { mstore(0, 1) }``
- for loops, e.g. ``for { let i := 0} lt(i, 10) { i := add(i, 1) } { mstore(i, 7) }``
- function definitions, e.g. ``function f(a, b) -> c { c := add(a, b) }```
- function definitions, e.g. ``function f(a, b) -> c { c := add(a, b) }``
Multiple syntactical elements can follow each other simply separated by
whitespace, i.e. there is no terminating ``;`` or newline required.
@ -714,13 +714,15 @@ We will use a destructuring notation for the AST nodes.
L'[$parami] = vi and L'[$reti] = 0 for all i.
Let G'', L'', mode = E(Gn, L', block)
G'', Ln, L''[$ret1], ..., L''[$retm]
E(G, L, l: StringLiteral) = G, L, utf8EncodeLeftAligned(l),
where utf8EncodeLeftAligned performs a UTF-8 encoding of l
and aligns it left into 32 bytes
E(G, L, l: StringLiteral) = G, L, str(l),
where str is the string evaluation function,
which for the EVM dialect is defined in the section 'Literals' above
E(G, L, n: HexNumber) = G, L, hex(n)
where hex is the hexadecimal decoding function
where hex is the hexadecimal evaluation function,
which turns a sequence of hexadecimal digits into their big endian value
E(G, L, n: DecimalNumber) = G, L, dec(n),
where dec is the decimal decoding function
where dec is the decimal evaluation function,
which turns a sequence of decimal digits into their big endian value
.. _opcodes:

View File

@ -100,7 +100,7 @@ string CharStream::lineAtPosition(int _position) const
return line;
}
tuple<int, int> CharStream::translatePositionToLineColumn(int _position) const
LineColumn CharStream::translatePositionToLineColumn(int _position) const
{
using size_type = string::size_type;
using diff_type = string::difference_type;
@ -114,7 +114,7 @@ tuple<int, int> CharStream::translatePositionToLineColumn(int _position) const
lineStart = m_source.rfind('\n', searchPosition - 1);
lineStart = lineStart == string::npos ? 0 : lineStart + 1;
}
return tuple<int, int>(lineNumber, searchPosition - lineStart);
return LineColumn{lineNumber, static_cast<int>(searchPosition - lineStart)};
}
string_view CharStream::text(SourceLocation const& _location) const
@ -144,3 +144,32 @@ string CharStream::singleLineSnippet(string const& _sourceCode, SourceLocation c
return cut;
}
/// Translates a zero-based line:column pair into an absolute character offset
/// within this stream's source text. Forwards to the static overload using m_source.
/// @returns the offset, or nullopt if the position does not exist in the source.
optional<int> CharStream::translateLineColumnToPosition(LineColumn const& _lineColumn) const
{
return translateLineColumnToPosition(m_source, _lineColumn);
}
/// Translates a zero-based line:column pair into an absolute character offset
/// within @a _text.
/// The column may point one past the last character of a line (i.e. at the
/// linefeed or end of text), matching an exclusive range end.
/// @returns the offset, or nullopt if the line or column is negative or lies
/// outside the text.
optional<int> CharStream::translateLineColumnToPosition(std::string const& _text, LineColumn const& _input)
{
	// Reject negative coordinates up front. Without the column check a
	// negative column would wrap around in the static_cast<size_t> below and
	// could produce a bogus position instead of nullopt.
	if (_input.line < 0 || _input.column < 0)
		return nullopt;
	size_t offset = 0;
	for (int i = 0; i < _input.line; i++)
	{
		offset = _text.find('\n', offset);
		if (offset == _text.npos)
			return nullopt;
		offset++; // Skip linefeed.
	}
	size_t endOfLine = _text.find('\n', offset);
	if (endOfLine == string::npos)
		endOfLine = _text.size();
	// <= endOfLine: the position one past the last character is valid.
	if (offset + static_cast<size_t>(_input.column) > endOfLine)
		return nullopt;
	return offset + static_cast<size_t>(_input.column);
}

View File

@ -51,6 +51,7 @@
#pragma once
#include <cstdint>
#include <optional>
#include <string>
#include <tuple>
#include <utility>
@ -59,6 +60,7 @@ namespace solidity::langutil
{
struct SourceLocation;
struct LineColumn;
/**
* Bidirectional stream of characters.
@ -97,9 +99,15 @@ public:
/// Functions that help pretty-printing parse errors
/// Do only use in error cases, they are quite expensive.
std::string lineAtPosition(int _position) const;
std::tuple<int, int> translatePositionToLineColumn(int _position) const;
LineColumn translatePositionToLineColumn(int _position) const;
///@}
/// Translates a line:column to the absolute position.
std::optional<int> translateLineColumnToPosition(LineColumn const& _lineColumn) const;
/// Translates a line:column to the absolute position for the given input text.
static std::optional<int> translateLineColumnToPosition(std::string const& _text, LineColumn const& _input);
/// Tests whether or not given octet sequence is present at the current position in stream.
/// @returns true if the sequence could be found, false otherwise.
bool prefixMatch(std::string_view _sequence)

View File

@ -75,6 +75,16 @@ Error::Error(
*this << util::errinfo_comment(_description);
}
/// @returns the source location attached to this error via errinfo_sourceLocation,
/// or nullptr if none was attached.
SourceLocation const* Error::sourceLocation() const noexcept
{
return boost::get_error_info<errinfo_sourceLocation>(*this);
}
/// @returns the secondary source location (e.g. related declarations) attached via
/// errinfo_secondarySourceLocation, or nullptr if none was attached.
SecondarySourceLocation const* Error::secondarySourceLocation() const noexcept
{
return boost::get_error_info<errinfo_secondarySourceLocation>(*this);
}
optional<Error::Severity> Error::severityFromString(string _input)
{
boost::algorithm::to_lower(_input);

View File

@ -197,6 +197,9 @@ public:
Type type() const { return m_type; }
std::string const& typeName() const { return m_typeName; }
SourceLocation const* sourceLocation() const noexcept;
SecondarySourceLocation const* secondarySourceLocation() const noexcept;
/// helper functions
static Error const* containsErrorOfType(ErrorList const& _list, Error::Type _type)
{
@ -206,7 +209,7 @@ public:
return nullptr;
}
static Severity errorSeverity(Type _type)
static constexpr Severity errorSeverity(Type _type)
{
if (_type == Type::Info)
return Severity::Info;

View File

@ -119,4 +119,23 @@ SourceLocation parseSourceLocation(
/// Stream output for Location (used e.g. in boost exceptions).
std::ostream& operator<<(std::ostream& _out, SourceLocation const& _location);
/**
 * Alternative, line-column-based representation for source locations.
 * Both line and column are zero-based.
 * If used as a range, the second location is considered exclusive.
 * Negative values are invalid.
 */
struct LineColumn
{
/// Line value, can be between zero and number of `\n` characters in the source file.
int line = -1;
/// Column value, can be between zero and number of characters in the line (inclusive).
int column = -1;
/// Constructs an invalid position (line and column both -1).
LineColumn() = default;
/// Constructs a position from an explicit zero-based line and column.
explicit LineColumn(int _line, int _column): line(_line), column(_column) {}
};
}

View File

@ -30,15 +30,6 @@ namespace solidity::langutil
class CharStreamProvider;
struct LineColumn
{
int line = {-1};
int column = {-1};
LineColumn() = default;
LineColumn(std::tuple<int, int> const& _t): line{std::get<0>(_t)}, column{std::get<1>(_t)} {}
};
struct SourceReference
{
std::string message; ///< A message that relates to this source reference (such as a warning, info or an error message).

View File

@ -387,7 +387,7 @@ bool TypeChecker::visit(FunctionDefinition const& _function)
_var.referenceLocation() == VariableDeclaration::Location::Storage &&
!m_currentContract->abstract()
)
m_errorReporter.typeError(
m_errorReporter.fatalTypeError(
3644_error,
_var.location(),
"This parameter has a type that can only be used internally. "
@ -403,7 +403,7 @@ bool TypeChecker::visit(FunctionDefinition const& _function)
solAssert(!message.empty(), "Expected detailed error message!");
if (_function.isConstructor())
message += " You can make the contract abstract to avoid this problem.";
m_errorReporter.typeError(4103_error, _var.location(), message);
m_errorReporter.fatalTypeError(4103_error, _var.location(), message);
}
else if (
!useABICoderV2() &&

View File

@ -972,8 +972,6 @@ void CHC::resetSourceAnalysis()
{
SMTEncoder::resetSourceAnalysis();
m_safeTargets.clear();
m_unsafeTargets.clear();
m_unprovedTargets.clear();
m_invariants.clear();
m_functionTargetIds.clear();

View File

@ -129,7 +129,7 @@ Json::Value formatErrorWithException(
_charStreamProvider
);
if (string const* description = boost::get_error_info<util::errinfo_comment>(_exception))
if (string const* description = _exception.comment())
message = ((_message.length() > 0) ? (_message + ":") : "") + *description;
else
message = _message;

View File

@ -79,6 +79,17 @@ string solidity::util::readUntilEnd(istream& _stdin)
return ss.str();
}
/// Tries to read exactly @a _length bytes from @a _input.
/// @returns a string with as much data as was actually available; on a short
/// read the result is truncated to the bytes read.
string solidity::util::readBytes(istream& _input, size_t _length)
{
	string result(_length, '\0');
	_input.read(result.data(), static_cast<streamsize>(_length));
	// A short read sets failbit in addition to eofbit; gcount() then tells us
	// how many bytes actually arrived, so trim the buffer to that size.
	if (_input.fail())
		result.resize(static_cast<size_t>(_input.gcount()));
	return result;
}
#if defined(_WIN32)
class DisableConsoleBuffering
{

View File

@ -57,6 +57,10 @@ std::string readFileAsString(boost::filesystem::path const& _file);
/// Retrieves and returns the whole content of the specified input stream (until EOF).
std::string readUntilEnd(std::istream& _stdin);
/// Tries to read exactly @a _length bytes from @a _input.
/// Returns a string containing as much data as has been read.
std::string readBytes(std::istream& _input, size_t _length);
/// Retrieves and returns a character from standard input (without waiting for EOL).
int readStandardInputChar();

View File

@ -39,8 +39,6 @@ struct Exception: virtual std::exception, virtual boost::exception
/// @returns the errinfo_comment of this exception.
std::string const* comment() const noexcept;
private:
};
/// Throws an exception with a given description and extra information about the location the
@ -56,6 +54,10 @@ private:
::boost::throw_line(__LINE__) \
)
/// Defines an exception type that's meant to signal a specific condition and be caught rather than
/// unwind the stack all the way to the top-level exception handler and interrupt the program.
/// As such it does not carry a message - the code catching it is expected to handle it without
/// letting it escape.
#define DEV_SIMPLE_EXCEPTION(X) struct X: virtual ::solidity::util::Exception { const char* what() const noexcept override { return #X; } }
DEV_SIMPLE_EXCEPTION(InvalidAddress);

View File

@ -173,10 +173,10 @@ add_library(yul
optimiser/OptimizerUtilities.h
optimiser/ReasoningBasedSimplifier.cpp
optimiser/ReasoningBasedSimplifier.h
optimiser/RedundantAssignEliminator.cpp
optimiser/RedundantAssignEliminator.h
optimiser/RedundantStoreBase.cpp
optimiser/RedundantStoreBase.h
optimiser/UnusedAssignEliminator.cpp
optimiser/UnusedAssignEliminator.h
optimiser/UnusedStoreBase.cpp
optimiser/UnusedStoreBase.h
optimiser/Rematerialiser.cpp
optimiser/Rematerialiser.h
optimiser/SMTSolver.cpp

View File

@ -137,7 +137,11 @@ void EVMToEwasmTranslator::parsePolyfill()
string(solidity::yul::wasm::polyfill::Logical) +
string(solidity::yul::wasm::polyfill::Memory) +
"}", "");
m_polyfill = Parser(errorReporter, WasmDialect::instance()).parse(charStream);
// Passing an empty SourceLocation() here is a workaround to prevent a crash
// when compiling from yul->ewasm. We're stripping nativeLocation and
// originLocation from the AST (but we only really need to strip nativeLocation)
m_polyfill = Parser(errorReporter, WasmDialect::instance(), langutil::SourceLocation()).parse(charStream);
if (!errors.empty())
{
string message;

View File

@ -43,3 +43,15 @@ void BlockFlattener::operator()(Block& _block)
}
);
}
/// Optimiser-step entry point: flattens nested blocks throughout the AST.
/// Expects the AST in FunctionGrouper form — every top-level statement must be
/// either a block or a function definition (asserted below); the flattener is
/// applied to each of them separately so the grouping itself is preserved.
void BlockFlattener::run(OptimiserStepContext&, Block& _ast)
{
BlockFlattener flattener;
for (auto& statement: _ast.statements)
if (auto* block = get_if<Block>(&statement))
flattener(*block);
else if (auto* function = get_if<FunctionDefinition>(&statement))
flattener(function->body);
else
yulAssert(false, "BlockFlattener requires the FunctionGrouper.");
}

View File

@ -27,7 +27,7 @@ class BlockFlattener: public ASTModifier
{
public:
static constexpr char const* name{"BlockFlattener"};
static void run(OptimiserStepContext&, Block& _ast) { BlockFlattener{}(_ast); }
static void run(OptimiserStepContext&, Block& _ast);
using ASTModifier::operator();
void operator()(Block& _block) override;

View File

@ -18,6 +18,7 @@
#include <libyul/optimiser/CircularReferencesPruner.h>
#include <libyul/optimiser/CallGraphGenerator.h>
#include <libyul/optimiser/FunctionGrouper.h>
#include <libyul/optimiser/OptimizerUtilities.h>
#include <libyul/AST.h>
@ -29,6 +30,7 @@ using namespace solidity::yul;
void CircularReferencesPruner::run(OptimiserStepContext& _context, Block& _ast)
{
CircularReferencesPruner{_context.reservedIdentifiers}(_ast);
FunctionGrouper::run(_context, _ast);
}
void CircularReferencesPruner::operator()(Block& _block)

View File

@ -22,6 +22,7 @@
#include <libyul/optimiser/ExpressionJoiner.h>
#include <libyul/optimiser/FunctionGrouper.h>
#include <libyul/optimiser/NameCollector.h>
#include <libyul/optimiser/OptimizerUtilities.h>
#include <libyul/Exceptions.h>
@ -37,9 +38,10 @@ using namespace std;
using namespace solidity;
using namespace solidity::yul;
void ExpressionJoiner::run(OptimiserStepContext&, Block& _ast)
void ExpressionJoiner::run(OptimiserStepContext& _context, Block& _ast)
{
ExpressionJoiner{_ast}(_ast);
FunctionGrouper::run(_context, _ast);
}

View File

@ -34,7 +34,7 @@ struct OptimiserStepContext;
* all function definitions.
*
* After this step, a block is of the form
* { { I...} F... }
* { { I... } F... }
* Where I are (non-function-definition) instructions and F are function definitions.
*/
class FunctionGrouper

View File

@ -37,6 +37,8 @@ namespace solidity::yul
{
/// Removes statements that are just empty blocks (non-recursive).
/// If this is run on the outermost block, the FunctionGrouper should be run afterwards to keep
/// the canonical form.
void removeEmptyBlocks(Block& _block);
/// Returns true if a given literal can not be used as an identifier.

View File

@ -70,7 +70,7 @@ class NameDispenser;
* variable references can use the SSA variable. The only exception to this rule are
* for loop conditions, as we cannot insert a variable declaration there.
*
* After this stage, redundantAssignmentRemover is recommended to remove the unnecessary
* After this stage, UnusedAssignmentEliminator is recommended to remove the unnecessary
* intermediate assignments.
*
* This stage provides best results if CSE is run right before it, because

View File

@ -55,7 +55,7 @@
#include <libyul/optimiser/StackLimitEvader.h>
#include <libyul/optimiser/StructuralSimplifier.h>
#include <libyul/optimiser/SyntacticalEquality.h>
#include <libyul/optimiser/RedundantAssignEliminator.h>
#include <libyul/optimiser/UnusedAssignEliminator.h>
#include <libyul/optimiser/VarNameCleaner.h>
#include <libyul/optimiser/LoadResolver.h>
#include <libyul/optimiser/LoopInvariantCodeMotion.h>
@ -118,7 +118,7 @@ void OptimiserSuite::run(
// Some steps depend on properties ensured by FunctionHoister, BlockFlattener, FunctionGrouper and
// ForLoopInitRewriter. Run them first to be able to run arbitrary sequences safely.
suite.runSequence("hfgo", ast);
suite.runSequence("hgfo", ast);
NameSimplifier::run(suite.m_context, ast);
// Now the user-supplied part
@ -219,7 +219,7 @@ map<string, unique_ptr<OptimiserStep>> const& OptimiserSuite::allSteps()
LiteralRematerialiser,
LoadResolver,
LoopInvariantCodeMotion,
RedundantAssignEliminator,
UnusedAssignEliminator,
ReasoningBasedSimplifier,
Rematerialiser,
SSAReverser,
@ -260,7 +260,7 @@ map<string, char> const& OptimiserSuite::stepNameToAbbreviationMap()
{LoadResolver::name, 'L'},
{LoopInvariantCodeMotion::name, 'M'},
{ReasoningBasedSimplifier::name, 'R'},
{RedundantAssignEliminator::name, 'r'},
{UnusedAssignEliminator::name, 'r'},
{Rematerialiser::name, 'm'},
{SSAReverser::name, 'V'},
{SSATransform::name, 'a'},

View File

@ -20,7 +20,7 @@
* until they go out of scope or are re-assigned.
*/
#include <libyul/optimiser/RedundantAssignEliminator.h>
#include <libyul/optimiser/UnusedAssignEliminator.h>
#include <libyul/optimiser/Semantics.h>
#include <libyul/AST.h>
@ -33,36 +33,36 @@ using namespace std;
using namespace solidity;
using namespace solidity::yul;
void RedundantAssignEliminator::run(OptimiserStepContext& _context, Block& _ast)
void UnusedAssignEliminator::run(OptimiserStepContext& _context, Block& _ast)
{
RedundantAssignEliminator rae{_context.dialect};
UnusedAssignEliminator rae{_context.dialect};
rae(_ast);
StatementRemover remover{rae.m_pendingRemovals};
remover(_ast);
}
void RedundantAssignEliminator::operator()(Identifier const& _identifier)
void UnusedAssignEliminator::operator()(Identifier const& _identifier)
{
changeUndecidedTo(_identifier.name, State::Used);
}
void RedundantAssignEliminator::operator()(VariableDeclaration const& _variableDeclaration)
void UnusedAssignEliminator::operator()(VariableDeclaration const& _variableDeclaration)
{
RedundantStoreBase::operator()(_variableDeclaration);
UnusedStoreBase::operator()(_variableDeclaration);
for (auto const& var: _variableDeclaration.variables)
m_declaredVariables.emplace(var.name);
}
void RedundantAssignEliminator::operator()(Assignment const& _assignment)
void UnusedAssignEliminator::operator()(Assignment const& _assignment)
{
visit(*_assignment.value);
for (auto const& var: _assignment.variableNames)
changeUndecidedTo(var.name, State::Unused);
}
void RedundantAssignEliminator::operator()(FunctionDefinition const& _functionDefinition)
void UnusedAssignEliminator::operator()(FunctionDefinition const& _functionDefinition)
{
ScopedSaveAndRestore outerDeclaredVariables(m_declaredVariables, {});
ScopedSaveAndRestore outerReturnVariables(m_returnVariables, {});
@ -70,28 +70,28 @@ void RedundantAssignEliminator::operator()(FunctionDefinition const& _functionDe
for (auto const& retParam: _functionDefinition.returnVariables)
m_returnVariables.insert(retParam.name);
RedundantStoreBase::operator()(_functionDefinition);
UnusedStoreBase::operator()(_functionDefinition);
}
void RedundantAssignEliminator::operator()(Leave const&)
void UnusedAssignEliminator::operator()(Leave const&)
{
for (YulString name: m_returnVariables)
changeUndecidedTo(name, State::Used);
}
void RedundantAssignEliminator::operator()(Block const& _block)
void UnusedAssignEliminator::operator()(Block const& _block)
{
ScopedSaveAndRestore outerDeclaredVariables(m_declaredVariables, {});
RedundantStoreBase::operator()(_block);
UnusedStoreBase::operator()(_block);
for (auto const& var: m_declaredVariables)
finalize(var, State::Unused);
}
void RedundantAssignEliminator::visit(Statement const& _statement)
void UnusedAssignEliminator::visit(Statement const& _statement)
{
RedundantStoreBase::visit(_statement);
UnusedStoreBase::visit(_statement);
if (auto const* assignment = get_if<Assignment>(&_statement))
if (assignment->variableNames.size() == 1)
@ -99,7 +99,7 @@ void RedundantAssignEliminator::visit(Statement const& _statement)
m_stores[assignment->variableNames.front().name][&_statement];
}
void RedundantAssignEliminator::shortcutNestedLoop(TrackedStores const& _zeroRuns)
void UnusedAssignEliminator::shortcutNestedLoop(TrackedStores const& _zeroRuns)
{
// Shortcut to avoid horrible runtime:
// Change all assignments that were newly introduced in the for loop to "used".
@ -116,7 +116,7 @@ void RedundantAssignEliminator::shortcutNestedLoop(TrackedStores const& _zeroRun
}
}
void RedundantAssignEliminator::finalizeFunctionDefinition(FunctionDefinition const& _functionDefinition)
void UnusedAssignEliminator::finalizeFunctionDefinition(FunctionDefinition const& _functionDefinition)
{
for (auto const& param: _functionDefinition.parameters)
finalize(param.name, State::Unused);
@ -124,14 +124,14 @@ void RedundantAssignEliminator::finalizeFunctionDefinition(FunctionDefinition co
finalize(retParam.name, State::Used);
}
void RedundantAssignEliminator::changeUndecidedTo(YulString _variable, RedundantAssignEliminator::State _newState)
void UnusedAssignEliminator::changeUndecidedTo(YulString _variable, UnusedAssignEliminator::State _newState)
{
for (auto& assignment: m_stores[_variable])
if (assignment.second == State::Undecided)
assignment.second = _newState;
}
void RedundantAssignEliminator::finalize(YulString _variable, RedundantAssignEliminator::State _finalState)
void UnusedAssignEliminator::finalize(YulString _variable, UnusedAssignEliminator::State _finalState)
{
std::map<Statement const*, State> stores = std::move(m_stores[_variable]);
m_stores.erase(_variable);

View File

@ -25,7 +25,7 @@
#include <libyul/ASTForward.h>
#include <libyul/optimiser/ASTWalker.h>
#include <libyul/optimiser/OptimiserStep.h>
#include <libyul/optimiser/RedundantStoreBase.h>
#include <libyul/optimiser/UnusedStoreBase.h>
#include <map>
#include <vector>
@ -107,13 +107,13 @@ struct Dialect;
*
* Prerequisite: Disambiguator, ForLoopInitRewriter.
*/
class RedundantAssignEliminator: public RedundantStoreBase
class UnusedAssignEliminator: public UnusedStoreBase
{
public:
static constexpr char const* name{"RedundantAssignEliminator"};
static constexpr char const* name{"UnusedAssignEliminator"};
static void run(OptimiserStepContext&, Block& _ast);
explicit RedundantAssignEliminator(Dialect const& _dialect): RedundantStoreBase(_dialect) {}
explicit UnusedAssignEliminator(Dialect const& _dialect): UnusedStoreBase(_dialect) {}
void operator()(Identifier const& _identifier) override;
void operator()(VariableDeclaration const& _variableDeclaration) override;
@ -122,7 +122,7 @@ public:
void operator()(Leave const&) override;
void operator()(Block const& _block) override;
using RedundantStoreBase::visit;
using UnusedStoreBase::visit;
void visit(Statement const& _statement) override;
private:

View File

@ -21,6 +21,7 @@
#include <libyul/optimiser/UnusedPruner.h>
#include <libyul/optimiser/CallGraphGenerator.h>
#include <libyul/optimiser/FunctionGrouper.h>
#include <libyul/optimiser/NameCollector.h>
#include <libyul/optimiser/Semantics.h>
#include <libyul/optimiser/OptimizerUtilities.h>
@ -33,6 +34,12 @@ using namespace std;
using namespace solidity;
using namespace solidity::yul;
/// Optimiser-step entry point: repeatedly prunes unused declarations from the
/// whole AST until it stabilises, then re-runs the FunctionGrouper to restore
/// the canonical grouped form afterwards.
void UnusedPruner::run(OptimiserStepContext& _context, Block& _ast)
{
UnusedPruner::runUntilStabilisedOnFullAST(_context.dialect, _ast, _context.reservedIdentifiers);
FunctionGrouper::run(_context, _ast);
}
UnusedPruner::UnusedPruner(
Dialect const& _dialect,
Block& _ast,

View File

@ -50,9 +50,7 @@ class UnusedPruner: public ASTModifier
{
public:
static constexpr char const* name{"UnusedPruner"};
static void run(OptimiserStepContext& _context, Block& _ast) {
UnusedPruner::runUntilStabilisedOnFullAST(_context.dialect, _ast, _context.reservedIdentifiers);
}
static void run(OptimiserStepContext& _context, Block& _ast);
using ASTModifier::operator();

View File

@ -16,10 +16,10 @@
*/
// SPDX-License-Identifier: GPL-3.0
/**
* Base class for both RedundantAssignEliminator and RedundantStoreEliminator.
* Base class for both UnusedAssignEliminator and UnusedStoreEliminator.
*/
#include <libyul/optimiser/RedundantStoreBase.h>
#include <libyul/optimiser/UnusedStoreBase.h>
#include <libyul/optimiser/Semantics.h>
#include <libyul/optimiser/OptimiserStep.h>
@ -33,7 +33,7 @@ using namespace std;
using namespace solidity;
using namespace solidity::yul;
void RedundantStoreBase::operator()(If const& _if)
void UnusedStoreBase::operator()(If const& _if)
{
visit(*_if.condition);
@ -43,7 +43,7 @@ void RedundantStoreBase::operator()(If const& _if)
merge(m_stores, move(skipBranch));
}
void RedundantStoreBase::operator()(Switch const& _switch)
void UnusedStoreBase::operator()(Switch const& _switch)
{
visit(*_switch.expression);
@ -69,7 +69,7 @@ void RedundantStoreBase::operator()(Switch const& _switch)
merge(m_stores, move(branch));
}
void RedundantStoreBase::operator()(FunctionDefinition const& _functionDefinition)
void UnusedStoreBase::operator()(FunctionDefinition const& _functionDefinition)
{
ScopedSaveAndRestore outerAssignments(m_stores, {});
ScopedSaveAndRestore forLoopInfo(m_forLoopInfo, {});
@ -79,7 +79,7 @@ void RedundantStoreBase::operator()(FunctionDefinition const& _functionDefinitio
finalizeFunctionDefinition(_functionDefinition);
}
void RedundantStoreBase::operator()(ForLoop const& _forLoop)
void UnusedStoreBase::operator()(ForLoop const& _forLoop)
{
ScopedSaveAndRestore outerForLoopInfo(m_forLoopInfo, {});
ScopedSaveAndRestore forLoopNestingDepth(m_forLoopNestingDepth, m_forLoopNestingDepth + 1);
@ -127,19 +127,19 @@ void RedundantStoreBase::operator()(ForLoop const& _forLoop)
m_forLoopInfo.pendingBreakStmts.clear();
}
void RedundantStoreBase::operator()(Break const&)
void UnusedStoreBase::operator()(Break const&)
{
m_forLoopInfo.pendingBreakStmts.emplace_back(move(m_stores));
m_stores.clear();
}
void RedundantStoreBase::operator()(Continue const&)
void UnusedStoreBase::operator()(Continue const&)
{
m_forLoopInfo.pendingContinueStmts.emplace_back(move(m_stores));
m_stores.clear();
}
void RedundantStoreBase::merge(TrackedStores& _target, TrackedStores&& _other)
void UnusedStoreBase::merge(TrackedStores& _target, TrackedStores&& _other)
{
util::joinMap(_target, move(_other), [](
map<Statement const*, State>& _assignmentHere,
@ -150,7 +150,7 @@ void RedundantStoreBase::merge(TrackedStores& _target, TrackedStores&& _other)
});
}
void RedundantStoreBase::merge(TrackedStores& _target, vector<TrackedStores>&& _source)
void UnusedStoreBase::merge(TrackedStores& _target, vector<TrackedStores>&& _source)
{
for (TrackedStores& ts: _source)
merge(_target, move(ts));

View File

@ -16,7 +16,7 @@
*/
// SPDX-License-Identifier: GPL-3.0
/**
* Base class for both RedundantAssignEliminator and RedundantStoreEliminator.
* Base class for both UnusedAssignEliminator and UnusedStoreEliminator.
*/
#pragma once
@ -34,14 +34,19 @@ namespace solidity::yul
struct Dialect;
/**
* Base class for both RedundantAssignEliminator and RedundantStoreEliminator.
* Base class for both UnusedAssignEliminator and UnusedStoreEliminator.
*
* The class tracks the state of abstract "stores" (assignments or mstore/sstore
* statements) across the control-flow. It is the job of the derived class to create
* the stores and track references, but the base class adjusts their "used state" at
* control-flow splits and joins.
*
* Prerequisite: Disambiguator, ForLoopInitRewriter.
*/
class RedundantStoreBase: public ASTWalker
class UnusedStoreBase: public ASTWalker
{
public:
explicit RedundantStoreBase(Dialect const& _dialect): m_dialect(_dialect) {}
explicit UnusedStoreBase(Dialect const& _dialect): m_dialect(_dialect) {}
using ASTWalker::operator();
void operator()(If const& _if) override;

View File

@ -34,7 +34,7 @@ else
BUILD_DIR="$1"
fi
# solbuildpackpusher/solidity-buildpack-deps:emscripten-7
# solbuildpackpusher/solidity-buildpack-deps:emscripten-8
docker run -v "$(pwd):/root/project" -w /root/project \
solbuildpackpusher/solidity-buildpack-deps@sha256:9ffcd0944433fe100e9433f2aa9ba5c21e096e758ad8a05a4a76feaed3d1f463 \
solbuildpackpusher/solidity-buildpack-deps@sha256:842d6074e0e7e5355c89122c1cafc1fdb59696596750e7d56e5f35c0d883ad59 \
./scripts/ci/build_emscripten.sh "$BUILD_DIR"

View File

@ -171,6 +171,18 @@ function msg_on_error
fi
}
# Prints a unified, colored diff of two values given as strings.
# Any arguments beyond the first two are forwarded verbatim to diff.
function diff_values
{
    (( $# >= 2 )) || fail "diff_values requires at least 2 arguments."

    local lhs="$1" rhs="$2"
    shift 2

    diff --color=auto --unified=0 <(echo "$lhs") <(echo "$rhs") "$@"
}
function safe_kill
{
local PID=${1}
@ -196,3 +208,25 @@ function safe_kill
kill -9 "$PID"
fi
}
# Selects the subset of steps this CircleCI node should run.
# When the job runs on more than one parallel node, the circleci CLI splits the
# step list across nodes; otherwise every step is selected.
# Output is the selected steps joined on a single line (via xargs).
function circleci_select_steps
{
    local all_steps="$1"
    (( $# == 1 )) || assertFail

    # An unset CIRCLE_NODE_TOTAL evaluates to 0 in an arithmetic context,
    # so a single comparison covers both the unset and the single-node case.
    if (( CIRCLE_NODE_TOTAL > 1 ))
    then
        echo "$all_steps" | circleci tests split | xargs
    else
        echo "$all_steps" | xargs
    fi
}
# Checks whether step $2 is present in the space-separated step list $1.
# Step names must not contain spaces (asserted). Returns 0 if the step is in
# the list, 1 otherwise.
function circleci_step_selected
{
local selected_steps="$1"
local step="$2"
[[ $step != *" "* ]] || assertFail "Step names must not contain spaces."
[[ " $selected_steps " == *" $step "* ]] || return 1
}

View File

@ -25,7 +25,7 @@ set -ev
keyid=70D110489D66E2F6
email=builds@ethereum.org
packagename=z3-static
version=4.8.12
version=4.8.13
DISTRIBUTIONS="focal groovy hirsute"

View File

@ -33,12 +33,12 @@
# Using $(em-config CACHE)/sysroot/usr seems to work, though, and still has cmake find the
# dependencies automatically.
FROM emscripten/emsdk:2.0.33 AS base
LABEL version="7"
LABEL version="8"
ADD emscripten.jam /usr/src
RUN set -ex; \
cd /usr/src; \
git clone https://github.com/Z3Prover/z3.git -b z3-4.8.12 --depth 1 ; \
git clone https://github.com/Z3Prover/z3.git -b z3-4.8.13 --depth 1 ; \
cd z3; \
mkdir build; \
cd build; \

View File

@ -22,7 +22,7 @@
# (c) 2016-2021 solidity contributors.
#------------------------------------------------------------------------------
FROM gcr.io/oss-fuzz-base/base-clang:latest as base
LABEL version="13"
LABEL version="14"
ARG DEBIAN_FRONTEND=noninteractive
@ -61,7 +61,7 @@ RUN set -ex; \
# Z3
RUN set -ex; \
git clone --depth 1 -b z3-4.8.12 https://github.com/Z3Prover/z3.git \
git clone --depth 1 -b z3-4.8.13 https://github.com/Z3Prover/z3.git \
/usr/src/z3; \
cd /usr/src/z3; \
mkdir build; \
@ -102,18 +102,6 @@ RUN set -ex; \
ninja install/strip; \
rm -rf /usr/src/evmone
# HERA
RUN set -ex; \
cd /usr/src; \
git clone --branch="v0.5.0" --depth 1 --recurse-submodules https://github.com/ewasm/hera.git; \
cd hera; \
mkdir build; \
cd build; \
cmake -G Ninja -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX="/usr" ..; \
ninja; \
ninja install/strip; \
rm -rf /usr/src/hera
# gmp
RUN set -ex; \
# Replace system installed libgmp static library

View File

@ -22,7 +22,7 @@
# (c) 2016-2019 solidity contributors.
#------------------------------------------------------------------------------
FROM buildpack-deps:focal AS base
LABEL version="8"
LABEL version="9"
ARG DEBIAN_FRONTEND=noninteractive

View File

@ -22,7 +22,7 @@
# (c) 2016-2019 solidity contributors.
#------------------------------------------------------------------------------
FROM buildpack-deps:focal AS base
LABEL version="8"
LABEL version="9"
ARG DEBIAN_FRONTEND=noninteractive

View File

@ -1,6 +1,7 @@
set(libsolcli_sources
CommandLineInterface.cpp CommandLineInterface.h
CommandLineParser.cpp CommandLineParser.h
Exceptions.h
)
add_library(solcli ${libsolcli_sources})

View File

@ -23,6 +23,8 @@
*/
#include <solc/CommandLineInterface.h>
#include <solc/Exceptions.h>
#include "license.h"
#include "solidity/BuildInfo.h"
@ -402,7 +404,7 @@ void CommandLineInterface::handleGasEstimation(string const& _contract)
}
}
bool CommandLineInterface::readInputFiles()
void CommandLineInterface::readInputFiles()
{
solAssert(!m_standardJsonInput.has_value(), "");
@ -411,23 +413,17 @@ bool CommandLineInterface::readInputFiles()
m_options.input.mode == InputMode::License ||
m_options.input.mode == InputMode::Version
)
return true;
return;
m_fileReader.setBasePath(m_options.input.basePath);
if (m_fileReader.basePath() != "")
{
if (!boost::filesystem::exists(m_fileReader.basePath()))
{
serr() << "Base path does not exist: " << m_fileReader.basePath() << endl;
return false;
}
solThrow(CommandLineValidationError, "Base path does not exist: \"" + m_fileReader.basePath().string() + '"');
if (!boost::filesystem::is_directory(m_fileReader.basePath()))
{
serr() << "Base path is not a directory: " << m_fileReader.basePath() << endl;
return false;
}
solThrow(CommandLineValidationError, "Base path is not a directory: \"" + m_fileReader.basePath().string() + '"');
}
for (boost::filesystem::path const& includePath: m_options.input.includePaths)
@ -442,16 +438,18 @@ bool CommandLineInterface::readInputFiles()
{
auto pathToQuotedString = [](boost::filesystem::path const& _path){ return "\"" + _path.string() + "\""; };
serr() << "Source unit name collision detected. ";
serr() << "The specified values of base path and/or include paths would result in multiple ";
serr() << "input files being assigned the same source unit name:" << endl;
string message =
"Source unit name collision detected. "
"The specified values of base path and/or include paths would result in multiple "
"input files being assigned the same source unit name:\n";
for (auto const& [sourceUnitName, normalizedInputPaths]: collisions)
{
serr() << sourceUnitName << " matches: ";
serr() << joinHumanReadable(normalizedInputPaths | ranges::views::transform(pathToQuotedString)) << endl;
message += sourceUnitName + " matches: ";
message += joinHumanReadable(normalizedInputPaths | ranges::views::transform(pathToQuotedString)) + "\n";
}
return false;
solThrow(CommandLineValidationError, message);
}
for (boost::filesystem::path const& infile: m_options.input.paths)
@ -459,10 +457,7 @@ bool CommandLineInterface::readInputFiles()
if (!boost::filesystem::exists(infile))
{
if (!m_options.input.ignoreMissingFiles)
{
serr() << infile << " is not found." << endl;
return false;
}
solThrow(CommandLineValidationError, '"' + infile.string() + "\" is not found.");
else
serr() << infile << " is not found. Skipping." << endl;
@ -472,10 +467,7 @@ bool CommandLineInterface::readInputFiles()
if (!boost::filesystem::is_regular_file(infile))
{
if (!m_options.input.ignoreMissingFiles)
{
serr() << infile << " is not a valid file." << endl;
return false;
}
solThrow(CommandLineValidationError, '"' + infile.string() + "\" is not a valid file.");
else
serr() << infile << " is not a valid file. Skipping." << endl;
@ -508,12 +500,7 @@ bool CommandLineInterface::readInputFiles()
}
if (m_fileReader.sourceCodes().empty() && !m_standardJsonInput.has_value())
{
serr() << "All specified input files either do not exist or are not regular files." << endl;
return false;
}
return true;
solThrow(CommandLineValidationError, "All specified input files either do not exist or are not regular files.");
}
map<string, Json::Value> CommandLineInterface::parseAstFromInput()
@ -559,19 +546,12 @@ void CommandLineInterface::createFile(string const& _fileName, string const& _da
string pathName = (m_options.output.dir / _fileName).string();
if (fs::exists(pathName) && !m_options.output.overwriteFiles)
{
serr() << "Refusing to overwrite existing file \"" << pathName << "\" (use --overwrite to force)." << endl;
m_outputFailed = true;
return;
}
solThrow(CommandLineOutputError, "Refusing to overwrite existing file \"" + pathName + "\" (use --overwrite to force).");
ofstream outFile(pathName);
outFile << _data;
if (!outFile)
{
serr() << "Could not write to file \"" << pathName << "\"." << endl;
m_outputFailed = true;
return;
}
solThrow(CommandLineOutputError, "Could not write to file \"" + pathName + "\".");
}
void CommandLineInterface::createJson(string const& _fileName, string const& _json)
@ -579,9 +559,33 @@ void CommandLineInterface::createJson(string const& _fileName, string const& _js
createFile(boost::filesystem::basename(_fileName) + string(".json"), _json);
}
bool CommandLineInterface::run(int _argc, char const* const* _argv)
{
try
{
if (!parseArguments(_argc, _argv))
return false;
readInputFiles();
processInput();
return true;
}
catch (CommandLineError const& _exception)
{
m_hasOutput = true;
// There might be no message in the exception itself if the error output is bulky and has
// already been printed to stderr (this happens e.g. for compiler errors).
if (_exception.what() != ""s)
serr() << _exception.what() << endl;
return false;
}
}
bool CommandLineInterface::parseArguments(int _argc, char const* const* _argv)
{
CommandLineParser parser(serr(/* _markAsUsed */ false));
CommandLineParser parser;
if (isatty(fileno(stdin)) && _argc == 1)
{
@ -592,16 +596,13 @@ bool CommandLineInterface::parseArguments(int _argc, char const* const* _argv)
return false;
}
bool success = parser.parse(_argc, _argv);
if (!success)
return false;
m_hasOutput = m_hasOutput || parser.hasOutput();
parser.parse(_argc, _argv);
m_options = parser.options();
return true;
}
bool CommandLineInterface::processInput()
void CommandLineInterface::processInput()
{
switch (m_options.input.mode)
{
@ -624,22 +625,17 @@ bool CommandLineInterface::processInput()
break;
}
case InputMode::Assembler:
if (!assemble(m_options.assembly.inputLanguage, m_options.assembly.targetMachine))
return false;
assemble(m_options.assembly.inputLanguage, m_options.assembly.targetMachine);
break;
case InputMode::Linker:
if (!link())
return false;
link();
writeLinkedFiles();
break;
case InputMode::Compiler:
case InputMode::CompilerWithASTImport:
if (!compile())
return false;
compile();
outputCompilationResults();
}
return !m_outputFailed;
}
void CommandLineInterface::printVersion()
@ -655,7 +651,7 @@ void CommandLineInterface::printLicense()
sout() << licenseText << endl;
}
bool CommandLineInterface::compile()
void CommandLineInterface::compile()
{
solAssert(m_options.input.mode == InputMode::Compiler || m_options.input.mode == InputMode::CompilerWithASTImport, "");
@ -718,8 +714,9 @@ bool CommandLineInterface::compile()
}
catch (Exception const& _exc)
{
serr() << string("Failed to import AST: ") << _exc.what() << endl;
return false;
// FIXME: AST import is missing proper validations. This hack catches failing
// assertions and presents them as if they were compiler errors.
solThrow(CommandLineExecutionError, "Failed to import AST: "s + _exc.what());
}
}
else
@ -736,29 +733,29 @@ bool CommandLineInterface::compile()
formatter.printErrorInformation(*error);
}
if (!successful)
return m_options.input.errorRecovery;
if (!successful && !m_options.input.errorRecovery)
solThrow(CommandLineExecutionError, "");
}
catch (CompilerError const& _exception)
{
m_hasOutput = true;
formatter.printExceptionInformation(_exception, "Compiler error");
return false;
solThrow(CommandLineExecutionError, "");
}
catch (Error const& _error)
{
if (_error.type() == Error::Type::DocstringParsingError)
serr() << "Documentation parsing error: " << *boost::get_error_info<errinfo_comment>(_error) << endl;
{
serr() << *boost::get_error_info<errinfo_comment>(_error);
solThrow(CommandLineExecutionError, "Documentation parsing failed.");
}
else
{
m_hasOutput = true;
formatter.printExceptionInformation(_error, _error.typeName());
solThrow(CommandLineExecutionError, "");
}
return false;
}
return true;
}
void CommandLineInterface::handleCombinedJSON()
@ -887,7 +884,7 @@ void CommandLineInterface::handleAst()
}
}
bool CommandLineInterface::link()
void CommandLineInterface::link()
{
solAssert(m_options.input.mode == InputMode::Linker, "");
@ -925,11 +922,11 @@ bool CommandLineInterface::link()
*(it + placeholderSize - 2) != '_' ||
*(it + placeholderSize - 1) != '_'
)
{
serr() << "Error in binary object file " << src.first << " at position " << (it - src.second.begin()) << endl;
serr() << '"' << string(it, it + min(placeholderSize, static_cast<int>(end - it))) << "\" is not a valid link reference." << endl;
return false;
}
solThrow(
CommandLineExecutionError,
"Error in binary object file " + src.first + " at position " + to_string(it - src.second.begin()) + "\n" +
'"' + string(it, it + min(placeholderSize, static_cast<int>(end - it))) + "\" is not a valid link reference."
);
string foundPlaceholder(it, it + placeholderSize);
if (librariesReplacements.count(foundPlaceholder))
@ -948,8 +945,6 @@ bool CommandLineInterface::link()
src.second.resize(src.second.size() - 1);
}
m_fileReader.setSources(move(sourceCodes));
return true;
}
void CommandLineInterface::writeLinkedFiles()
@ -964,11 +959,7 @@ void CommandLineInterface::writeLinkedFiles()
ofstream outFile(src.first);
outFile << src.second;
if (!outFile)
{
serr() << "Could not write to file " << src.first << ". Aborting." << endl;
m_outputFailed = true;
return;
}
solThrow(CommandLineOutputError, "Could not write to file " + src.first + ". Aborting.");
}
sout() << "Linking completed." << endl;
}
@ -990,10 +981,12 @@ string CommandLineInterface::objectWithLinkRefsHex(evmasm::LinkerObject const& _
return out;
}
bool CommandLineInterface::assemble(yul::AssemblyStack::Language _language, yul::AssemblyStack::Machine _targetMachine)
void CommandLineInterface::assemble(yul::AssemblyStack::Language _language, yul::AssemblyStack::Machine _targetMachine)
{
solAssert(m_options.input.mode == InputMode::Assembler, "");
serr() << "Warning: Yul is still experimental. Please use the output with care." << endl;
bool successful = true;
map<string, yul::AssemblyStack> assemblyStacks;
for (auto const& src: m_fileReader.sourceCodes())
@ -1031,7 +1024,10 @@ bool CommandLineInterface::assemble(yul::AssemblyStack::Language _language, yul:
}
if (!successful)
return false;
{
solAssert(m_hasOutput);
solThrow(CommandLineExecutionError, "");
}
for (auto const& src: m_fileReader.sourceCodes())
{
@ -1089,8 +1085,6 @@ bool CommandLineInterface::assemble(yul::AssemblyStack::Language _language, yul:
serr() << "No text representation found." << endl;
}
}
return true;
}
void CommandLineInterface::outputCompilationResults()
@ -1127,13 +1121,9 @@ void CommandLineInterface::outputCompilationResults()
ret = m_compiler->assemblyString(contract, m_fileReader.sourceCodes());
if (!m_options.output.dir.empty())
{
createFile(m_compiler->filesystemFriendlyName(contract) + (m_options.compiler.outputs.asmJson ? "_evm.json" : ".evm"), ret);
}
else
{
sout() << "EVM assembly:" << endl << ret << endl;
}
}
if (m_options.compiler.estimateGas)

View File

@ -51,12 +51,28 @@ public:
m_options(_options)
{}
/// Parse command line arguments and return false if we should not continue
/// Parses command-line arguments, executes the requested operation and handles validation and
/// execution errors.
/// @returns false if it catches a @p CommandLineValidationError or if the application is
/// expected to exit with a non-zero exit code despite there being no error.
bool run(int _argc, char const* const* _argv);
/// Parses command line arguments and stores the result in @p m_options.
/// @throws CommandLineValidationError if command-line arguments are invalid.
/// @returns false if the application is expected to exit with a non-zero exit code despite
/// there being no error.
bool parseArguments(int _argc, char const* const* _argv);
/// Read the content of all input files and initialize the file reader.
bool readInputFiles();
/// Parse the files, create source code objects, print the output.
bool processInput();
/// Reads the content of all input files and initializes the file reader.
/// @throws CommandLineValidationError if it fails to read the input files (invalid paths,
/// non-existent files, not enough or too many input files, etc.).
void readInputFiles();
/// Executes the requested operation (compilation, assembling, standard JSON, etc.) and prints
/// results to the terminal.
/// @throws CommandLineExecutionError if execution fails due to errors in the input files.
/// @throws CommandLineOutputError if creating output files or writing to them fails.
void processInput();
CommandLineOptions const& options() const { return m_options; }
FileReader const& fileReader() const { return m_fileReader; }
@ -65,15 +81,15 @@ public:
private:
void printVersion();
void printLicense();
bool compile();
bool link();
void compile();
void link();
void writeLinkedFiles();
/// @returns the ``// <identifier> -> name`` hint for library placeholders.
static std::string libraryPlaceholderHint(std::string const& _libraryName);
/// @returns the full object with library placeholder hints in hex.
static std::string objectWithLinkRefsHex(evmasm::LinkerObject const& _obj);
bool assemble(yul::AssemblyStack::Language _language, yul::AssemblyStack::Machine _targetMachine);
void assemble(yul::AssemblyStack::Language _language, yul::AssemblyStack::Machine _targetMachine);
void outputCompilationResults();
@ -120,7 +136,6 @@ private:
std::ostream& m_sout;
std::ostream& m_serr;
bool m_hasOutput = false;
bool m_outputFailed = false; ///< If true, creation or write to some of the output files failed.
FileReader m_fileReader;
std::optional<std::string> m_standardJsonInput;
std::unique_ptr<frontend::CompilerStack> m_compiler;

View File

@ -17,7 +17,11 @@
// SPDX-License-Identifier: GPL-3.0
#include <solc/CommandLineParser.h>
#include <solc/Exceptions.h>
#include <libyul/optimiser/Suite.h>
#include <liblangutil/EVMVersion.h>
#include <boost/algorithm/string.hpp>
@ -34,14 +38,6 @@ namespace po = boost::program_options;
namespace solidity::frontend
{
ostream& CommandLineParser::serr()
{
m_hasOutput = true;
return m_serr;
}
#define cerr
static string const g_strAllowPaths = "allow-paths";
static string const g_strBasePath = "base-path";
static string const g_strIncludePath = "include-path";
@ -131,6 +127,9 @@ static set<string> const g_metadataHashArgs
};
static map<InputMode, string> const g_inputModeName = {
{InputMode::Help, "help"},
{InputMode::License, "license"},
{InputMode::Version, "version"},
{InputMode::Compiler, "compiler"},
{InputMode::CompilerWithASTImport, "compiler (AST import)"},
{InputMode::Assembler, "assembler"},
@ -138,15 +137,16 @@ static map<InputMode, string> const g_inputModeName = {
{InputMode::Linker, "linker"},
};
bool CommandLineParser::checkMutuallyExclusive(vector<string> const& _optionNames)
void CommandLineParser::checkMutuallyExclusive(vector<string> const& _optionNames)
{
if (countEnabledOptions(_optionNames) > 1)
{
serr() << "The following options are mutually exclusive: " << joinOptionNames(_optionNames) << ". ";
serr() << "Select at most one." << endl;
return false;
solThrow(
CommandLineValidationError,
"The following options are mutually exclusive: " + joinOptionNames(_optionNames) + ". " +
"Select at most one."
);
}
return true;
}
bool CompilerOutputs::operator==(CompilerOutputs const& _other) const noexcept
@ -268,17 +268,13 @@ OptimiserSettings CommandLineOptions::optimiserSettings() const
return settings;
}
bool CommandLineParser::parse(int _argc, char const* const* _argv)
void CommandLineParser::parse(int _argc, char const* const* _argv)
{
m_hasOutput = false;
if (!parseArgs(_argc, _argv))
return false;
return processArgs();
parseArgs(_argc, _argv);
processArgs();
}
bool CommandLineParser::parseInputPathsAndRemappings()
void CommandLineParser::parseInputPathsAndRemappings()
{
m_options.input.ignoreMissingFiles = (m_args.count(g_strIgnoreMissingFiles) > 0);
@ -289,17 +285,14 @@ bool CommandLineParser::parseInputPathsAndRemappings()
{
optional<ImportRemapper::Remapping> remapping = ImportRemapper::parseRemapping(positionalArg);
if (!remapping.has_value())
{
serr() << "Invalid remapping: \"" << positionalArg << "\"." << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid remapping: \"" + positionalArg + "\".");
if (m_options.input.mode == InputMode::StandardJson)
{
serr() << "Import remappings are not accepted on the command line in Standard JSON mode." << endl;
serr() << "Please put them under 'settings.remappings' in the JSON input." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Import remappings are not accepted on the command line in Standard JSON mode.\n"
"Please put them under 'settings.remappings' in the JSON input."
);
m_options.input.remappings.emplace_back(move(remapping.value()));
}
@ -312,26 +305,24 @@ bool CommandLineParser::parseInputPathsAndRemappings()
if (m_options.input.mode == InputMode::StandardJson)
{
if (m_options.input.paths.size() > 1 || (m_options.input.paths.size() == 1 && m_options.input.addStdin))
{
serr() << "Too many input files for --" << g_strStandardJSON << "." << endl;
serr() << "Please either specify a single file name or provide its content on standard input." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Too many input files for --" + g_strStandardJSON + ".\n"
"Please either specify a single file name or provide its content on standard input."
);
else if (m_options.input.paths.size() == 0)
// Standard JSON mode input used to be handled separately and zero files meant "read from stdin".
// Keep it working that way for backwards-compatibility.
m_options.input.addStdin = true;
}
else if (m_options.input.paths.size() == 0 && !m_options.input.addStdin)
{
serr() << "No input files given. If you wish to use the standard input please specify \"-\" explicitly." << endl;
return false;
}
return true;
solThrow(
CommandLineValidationError,
"No input files given. If you wish to use the standard input please specify \"-\" explicitly."
);
}
bool CommandLineParser::parseLibraryOption(string const& _input)
void CommandLineParser::parseLibraryOption(string const& _input)
{
namespace fs = boost::filesystem;
string data = _input;
@ -366,71 +357,71 @@ bool CommandLineParser::parseLibraryOption(string const& _input)
{
separator = lib.rfind(':');
if (separator == string::npos)
{
serr() << "Equal sign separator missing in library address specifier \"" << lib << "\"" << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Equal sign separator missing in library address specifier \"" + lib + "\""
);
else
isSeparatorEqualSign = false; // separator is colon
}
else
if (lib.rfind('=') != lib.find('='))
{
serr() << "Only one equal sign \"=\" is allowed in the address string \"" << lib << "\"." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Only one equal sign \"=\" is allowed in the address string \"" + lib + "\"."
);
string libName(lib.begin(), lib.begin() + static_cast<ptrdiff_t>(separator));
boost::trim(libName);
if (m_options.linker.libraries.count(libName))
{
serr() << "Address specified more than once for library \"" << libName << "\"." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Address specified more than once for library \"" + libName + "\"."
);
string addrString(lib.begin() + static_cast<ptrdiff_t>(separator) + 1, lib.end());
boost::trim(addrString);
if (addrString.empty())
{
serr() << "Empty address provided for library \"" << libName << "\"." << endl;
serr() << "Note that there should not be any whitespace after the " << (isSeparatorEqualSign ? "equal sign" : "colon") << "." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Empty address provided for library \"" + libName + "\".\n"
"Note that there should not be any whitespace after the " +
(isSeparatorEqualSign ? "equal sign" : "colon") + "."
);
if (addrString.substr(0, 2) == "0x")
addrString = addrString.substr(2);
else
{
serr() << "The address " << addrString << " is not prefixed with \"0x\"." << endl;
serr() << "Note that the address must be prefixed with \"0x\"." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"The address " + addrString + " is not prefixed with \"0x\".\n"
"Note that the address must be prefixed with \"0x\"."
);
if (addrString.length() != 40)
{
serr() << "Invalid length for address for library \"" << libName << "\": " << addrString.length() << " instead of 40 characters." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Invalid length for address for library \"" + libName + "\": " +
to_string(addrString.length()) + " instead of 40 characters."
);
if (!passesAddressChecksum(addrString, false))
{
serr() << "Invalid checksum on address for library \"" << libName << "\": " << addrString << endl;
serr() << "The correct checksum is " << getChecksummedAddress(addrString) << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Invalid checksum on address for library \"" + libName + "\": " + addrString + "\n"
"The correct checksum is " + getChecksummedAddress(addrString)
);
bytes binAddr = fromHex(addrString);
h160 address(binAddr, h160::AlignRight);
if (binAddr.size() > 20 || address == h160())
{
serr() << "Invalid address for library \"" << libName << "\": " << addrString << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Invalid address for library \"" + libName + "\": " + addrString
);
m_options.linker.libraries[libName] = address;
}
return true;
}
bool CommandLineParser::parseOutputSelection()
void CommandLineParser::parseOutputSelection()
{
static auto outputSupported = [](InputMode _mode, string_view _outputName)
{
@ -486,13 +477,11 @@ bool CommandLineParser::parseOutputSelection()
unsupportedOutputs.push_back(optionName);
if (!unsupportedOutputs.empty())
{
serr() << "The following outputs are not supported in " << g_inputModeName.at(m_options.input.mode) << " mode: ";
serr() << joinOptionNames(unsupportedOutputs) << ".";
return false;
}
return true;
solThrow(
CommandLineValidationError,
"The following outputs are not supported in " + g_inputModeName.at(m_options.input.mode) + " mode: " +
joinOptionNames(unsupportedOutputs) + "."
);
}
po::options_description CommandLineParser::optionsDescription()
@ -831,7 +820,7 @@ po::positional_options_description CommandLineParser::positionalOptionsDescripti
return filesPositions;
}
bool CommandLineParser::parseArgs(int _argc, char const* const* _argv)
void CommandLineParser::parseArgs(int _argc, char const* const* _argv)
{
po::options_description allOptions = optionsDescription();
po::positional_options_description filesPositions = positionalOptionsDescription();
@ -846,18 +835,15 @@ bool CommandLineParser::parseArgs(int _argc, char const* const* _argv)
}
catch (po::error const& _exception)
{
serr() << _exception.what() << endl;
return false;
solThrow(CommandLineValidationError, _exception.what());
}
po::notify(m_args);
return true;
}
bool CommandLineParser::processArgs()
void CommandLineParser::processArgs()
{
if (!checkMutuallyExclusive({
checkMutuallyExclusive({
g_strHelp,
g_strLicense,
g_strVersion,
@ -867,8 +853,7 @@ bool CommandLineParser::processArgs()
g_strStrictAssembly,
g_strYul,
g_strImportAst,
}))
return false;
});
if (m_args.count(g_strHelp) > 0)
m_options.input.mode = InputMode::Help;
@ -892,7 +877,7 @@ bool CommandLineParser::processArgs()
m_options.input.mode == InputMode::License ||
m_options.input.mode == InputMode::Version
)
return true;
return;
map<string, set<InputMode>> validOptionInputModeCombinations = {
// TODO: This should eventually contain all options.
@ -907,13 +892,13 @@ bool CommandLineParser::processArgs()
}
if (!invalidOptionsForCurrentInputMode.empty())
{
serr() << "The following options are not supported in the current input mode: " << joinOptionNames(invalidOptionsForCurrentInputMode) << endl;
return false;
}
solThrow(
CommandLineValidationError,
"The following options are not supported in the current input mode: " +
joinOptionNames(invalidOptionsForCurrentInputMode)
);
if (!checkMutuallyExclusive({g_strColor, g_strNoColor}))
return false;
checkMutuallyExclusive({g_strColor, g_strNoColor});
array<string, 9> const conflictingWithStopAfter{
CompilerOutputs::componentName(&CompilerOutputs::binary),
@ -928,8 +913,7 @@ bool CommandLineParser::processArgs()
};
for (auto& option: conflictingWithStopAfter)
if (!checkMutuallyExclusive({g_strStopAfter, option}))
return false;
checkMutuallyExclusive({g_strStopAfter, option});
if (
m_options.input.mode != InputMode::Compiler &&
@ -938,23 +922,23 @@ bool CommandLineParser::processArgs()
)
{
if (!m_args[g_strOptimizeRuns].defaulted())
{
serr() << "Option --" << g_strOptimizeRuns << " is only valid in compiler and assembler modes." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Option --" + g_strOptimizeRuns + " is only valid in compiler and assembler modes."
);
for (string const& option: {g_strOptimize, g_strNoOptimizeYul, g_strOptimizeYul, g_strYulOptimizations})
if (m_args.count(option) > 0)
{
serr() << "Option --" << option << " is only valid in compiler and assembler modes." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Option --" + option + " is only valid in compiler and assembler modes."
);
if (!m_args[g_strDebugInfo].defaulted())
{
serr() << "Option --" << g_strDebugInfo << " is only valid in compiler and assembler modes." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Option --" + g_strDebugInfo + " is only valid in compiler and assembler modes."
);
}
if (m_args.count(g_strColor) > 0)
@ -969,15 +953,15 @@ bool CommandLineParser::processArgs()
string revertStringsString = m_args[g_strRevertStrings].as<string>();
std::optional<RevertStrings> revertStrings = revertStringsFromString(revertStringsString);
if (!revertStrings)
{
serr() << "Invalid option for --" << g_strRevertStrings << ": " << revertStringsString << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Invalid option for --" + g_strRevertStrings + ": " + revertStringsString
);
if (*revertStrings == RevertStrings::VerboseDebug)
{
serr() << "Only \"default\", \"strip\" and \"debug\" are implemented for --" << g_strRevertStrings << " for now." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"Only \"default\", \"strip\" and \"debug\" are implemented for --" + g_strRevertStrings + " for now."
);
m_options.output.revertStrings = *revertStrings;
}
@ -986,20 +970,13 @@ bool CommandLineParser::processArgs()
string optionValue = m_args[g_strDebugInfo].as<string>();
m_options.output.debugInfoSelection = DebugInfoSelection::fromString(optionValue);
if (!m_options.output.debugInfoSelection.has_value())
{
serr() << "Invalid value for --" << g_strDebugInfo << " option: " << optionValue << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid value for --" + g_strDebugInfo + " option: " + optionValue);
if (m_options.output.debugInfoSelection->snippet && !m_options.output.debugInfoSelection->location)
{
serr() << "To use 'snippet' with --" << g_strDebugInfo << " you must select also 'location'." << endl;
return false;
}
solThrow(CommandLineValidationError, "To use 'snippet' with --" + g_strDebugInfo + " you must select also 'location'.");
}
if (!parseCombinedJsonOption())
return false;
parseCombinedJsonOption();
if (m_args.count(g_strOutputDir))
m_options.output.dir = m_args.at(g_strOutputDir).as<string>();
@ -1016,8 +993,7 @@ bool CommandLineParser::processArgs()
m_options.formatting.json.indent = m_args[g_strJsonIndent].as<uint32_t>();
}
if (!parseOutputSelection())
return false;
parseOutputSelection();
m_options.compiler.estimateGas = (m_args.count(g_strGas) > 0);
@ -1027,18 +1003,13 @@ bool CommandLineParser::processArgs()
if (m_args.count(g_strIncludePath) > 0)
{
if (m_options.input.basePath.empty())
{
serr() << "--" << g_strIncludePath << " option requires a non-empty base path." << endl;
return false;
}
solThrow(CommandLineValidationError, "--" + g_strIncludePath + " option requires a non-empty base path.");
for (string const& includePath: m_args[g_strIncludePath].as<vector<string>>())
{
if (includePath.empty())
{
serr() << "Empty values are not allowed in --" << g_strIncludePath << "." << endl;
return false;
}
solThrow(CommandLineValidationError, "Empty values are not allowed in --" + g_strIncludePath + ".");
m_options.input.includePaths.push_back(includePath);
}
}
@ -1054,37 +1025,29 @@ bool CommandLineParser::processArgs()
if (m_args.count(g_strStopAfter))
{
if (m_args[g_strStopAfter].as<string>() != "parsing")
{
serr() << "Valid options for --" << g_strStopAfter << " are: \"parsing\".\n";
return false;
}
solThrow(CommandLineValidationError, "Valid options for --" + g_strStopAfter + " are: \"parsing\".\n");
else
m_options.output.stopAfter = CompilerStack::State::Parsed;
}
if (!parseInputPathsAndRemappings())
return false;
parseInputPathsAndRemappings();
if (m_options.input.mode == InputMode::StandardJson)
return true;
return;
if (m_args.count(g_strLibraries))
for (string const& library: m_args[g_strLibraries].as<vector<string>>())
if (!parseLibraryOption(library))
return false;
parseLibraryOption(library);
if (m_options.input.mode == InputMode::Linker)
return true;
return;
if (m_args.count(g_strEVMVersion))
{
string versionOptionStr = m_args[g_strEVMVersion].as<string>();
std::optional<langutil::EVMVersion> versionOption = langutil::EVMVersion::fromString(versionOptionStr);
if (!versionOption)
{
serr() << "Invalid option for --" << g_strEVMVersion << ": " << versionOptionStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strEVMVersion + ": " + versionOptionStr);
m_options.output.evmVersion = *versionOption;
}
@ -1097,10 +1060,7 @@ bool CommandLineParser::processArgs()
{
OptimiserSettings optimiserSettings = m_options.optimiserSettings();
if (!optimiserSettings.runYulOptimiser)
{
serr() << "--" << g_strYulOptimizations << " is invalid if Yul optimizer is disabled" << endl;
return false;
}
solThrow(CommandLineValidationError, "--" + g_strYulOptimizations + " is invalid if Yul optimizer is disabled");
try
{
@ -1108,8 +1068,10 @@ bool CommandLineParser::processArgs()
}
catch (yul::OptimizerException const& _exception)
{
serr() << "Invalid optimizer step sequence in --" << g_strYulOptimizations << ": " << _exception.what() << endl;
return false;
solThrow(
CommandLineValidationError,
"Invalid optimizer step sequence in --" + g_strYulOptimizations + ": " + _exception.what()
);
}
m_options.optimizer.yulSteps = m_args[g_strYulOptimizations].as<string>();
@ -1130,12 +1092,11 @@ bool CommandLineParser::processArgs()
auto optionEnabled = [&](string const& name){ return m_args.count(name) > 0; };
auto enabledOptions = nonAssemblyModeOptions | ranges::views::filter(optionEnabled) | ranges::to_vector;
serr() << "The following options are invalid in assembly mode: ";
serr() << joinOptionNames(enabledOptions) << ".";
string message = "The following options are invalid in assembly mode: " + joinOptionNames(enabledOptions) + ".";
if (m_args.count(g_strOptimizeYul) || m_args.count(g_strNoOptimizeYul))
serr() << " Optimization is disabled by default and can be enabled with --" << g_strOptimize << "." << endl;
serr() << endl;
return false;
message += " Optimization is disabled by default and can be enabled with --" + g_strOptimize + ".";
solThrow(CommandLineValidationError, message);
}
// switch to assembly mode
@ -1151,10 +1112,7 @@ bool CommandLineParser::processArgs()
else if (machine == g_strEwasm)
m_options.assembly.targetMachine = Machine::Ewasm;
else
{
serr() << "Invalid option for --" << g_strMachine << ": " << machine << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strMachine + ": " + machine);
}
if (m_options.assembly.targetMachine == Machine::Ewasm && m_options.assembly.inputLanguage == Input::StrictAssembly)
m_options.assembly.inputLanguage = Input::Ewasm;
@ -1167,45 +1125,33 @@ bool CommandLineParser::processArgs()
{
m_options.assembly.inputLanguage = Input::Ewasm;
if (m_options.assembly.targetMachine != Machine::Ewasm)
{
serr() << "If you select Ewasm as --" << g_strYulDialect << ", ";
serr() << "--" << g_strMachine << " has to be Ewasm as well." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"If you select Ewasm as --" + g_strYulDialect + ", "
"--" + g_strMachine + " has to be Ewasm as well."
);
}
else
{
serr() << "Invalid option for --" << g_strYulDialect << ": " << dialect << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strYulDialect + ": " + dialect);
}
if (m_options.optimizer.enabled && (m_options.assembly.inputLanguage != Input::StrictAssembly && m_options.assembly.inputLanguage != Input::Ewasm))
{
serr() <<
"Optimizer can only be used for strict assembly. Use --" <<
g_strStrictAssembly <<
"." <<
endl;
return false;
}
solThrow(
CommandLineValidationError,
"Optimizer can only be used for strict assembly. Use --" + g_strStrictAssembly + "."
);
if (m_options.assembly.targetMachine == Machine::Ewasm && m_options.assembly.inputLanguage != Input::StrictAssembly && m_options.assembly.inputLanguage != Input::Ewasm)
{
serr() << "The selected input language is not directly supported when targeting the Ewasm machine ";
serr() << "and automatic translation is not available." << endl;
return false;
}
serr() <<
"Warning: Yul is still experimental. Please use the output with care." <<
endl;
return true;
solThrow(
CommandLineValidationError,
"The selected input language is not directly supported when targeting the Ewasm machine "
"and automatic translation is not available."
);
return;
}
else if (countEnabledOptions({g_strYulDialect, g_strMachine}) >= 1)
{
serr() << "--" << g_strYulDialect << " and --" << g_strMachine << " ";
serr() << "are only valid in assembly mode." << endl;
return false;
}
solThrow(
CommandLineValidationError,
"--" + g_strYulDialect + " and --" + g_strMachine + " are only valid in assembly mode."
);
if (m_args.count(g_strMetadataHash))
{
@ -1217,10 +1163,7 @@ bool CommandLineParser::processArgs()
else if (hashStr == g_strNone)
m_options.metadata.hash = CompilerStack::MetadataHash::None;
else
{
serr() << "Invalid option for --" << g_strMetadataHash << ": " << hashStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strMetadataHash + ": " + hashStr);
}
if (m_args.count(g_strModelCheckerContracts))
@ -1228,10 +1171,7 @@ bool CommandLineParser::processArgs()
string contractsStr = m_args[g_strModelCheckerContracts].as<string>();
optional<ModelCheckerContracts> contracts = ModelCheckerContracts::fromString(contractsStr);
if (!contracts)
{
serr() << "Invalid option for --" << g_strModelCheckerContracts << ": " << contractsStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strModelCheckerContracts + ": " + contractsStr);
m_options.modelChecker.settings.contracts = move(*contracts);
}
@ -1243,10 +1183,7 @@ bool CommandLineParser::processArgs()
string engineStr = m_args[g_strModelCheckerEngine].as<string>();
optional<ModelCheckerEngine> engine = ModelCheckerEngine::fromString(engineStr);
if (!engine)
{
serr() << "Invalid option for --" << g_strModelCheckerEngine << ": " << engineStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strModelCheckerEngine + ": " + engineStr);
m_options.modelChecker.settings.engine = *engine;
}
@ -1255,10 +1192,7 @@ bool CommandLineParser::processArgs()
string invsStr = m_args[g_strModelCheckerInvariants].as<string>();
optional<ModelCheckerInvariants> invs = ModelCheckerInvariants::fromString(invsStr);
if (!invs)
{
serr() << "Invalid option for --" << g_strModelCheckerInvariants << ": " << invsStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strModelCheckerInvariants + ": " + invsStr);
m_options.modelChecker.settings.invariants = *invs;
}
@ -1270,10 +1204,7 @@ bool CommandLineParser::processArgs()
string solversStr = m_args[g_strModelCheckerSolvers].as<string>();
optional<smtutil::SMTSolverChoice> solvers = smtutil::SMTSolverChoice::fromString(solversStr);
if (!solvers)
{
serr() << "Invalid option for --" << g_strModelCheckerSolvers << ": " << solversStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strModelCheckerSolvers + ": " + solversStr);
m_options.modelChecker.settings.solvers = *solvers;
}
@ -1282,10 +1213,7 @@ bool CommandLineParser::processArgs()
string targetsStr = m_args[g_strModelCheckerTargets].as<string>();
optional<ModelCheckerTargets> targets = ModelCheckerTargets::fromString(targetsStr);
if (!targets)
{
serr() << "Invalid option for --" << g_strModelCheckerTargets << ": " << targetsStr << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option for --" + g_strModelCheckerTargets + ": " + targetsStr);
m_options.modelChecker.settings.targets = *targets;
}
@ -1307,27 +1235,21 @@ bool CommandLineParser::processArgs()
m_options.input.errorRecovery = (m_args.count(g_strErrorRecovery) > 0);
solAssert(m_options.input.mode == InputMode::Compiler || m_options.input.mode == InputMode::CompilerWithASTImport);
return true;
}
bool CommandLineParser::parseCombinedJsonOption()
void CommandLineParser::parseCombinedJsonOption()
{
if (!m_args.count(g_strCombinedJson))
return true;
return;
set<string> requests;
for (string const& item: boost::split(requests, m_args[g_strCombinedJson].as<string>(), boost::is_any_of(",")))
if (CombinedJsonRequests::componentMap().count(item) == 0)
{
serr() << "Invalid option to --" << g_strCombinedJson << ": " << item << endl;
return false;
}
solThrow(CommandLineValidationError, "Invalid option to --" + g_strCombinedJson + ": " + item);
m_options.compiler.combinedJsonRequests = CombinedJsonRequests{};
for (auto&& [componentName, component]: CombinedJsonRequests::componentMap())
m_options.compiler.combinedJsonRequests.value().*component = (requests.count(componentName) > 0);
return true;
}
size_t CommandLineParser::countEnabledOptions(vector<string> const& _optionNames) const

View File

@ -234,27 +234,17 @@ struct CommandLineOptions
};
/// Parses the command-line arguments and produces a filled-out CommandLineOptions structure.
/// Validates provided values and prints error messages in case of errors.
/// Validates provided values and reports errors by throwing @p CommandLineValidationErrors.
class CommandLineParser
{
public:
explicit CommandLineParser(std::ostream& _serr):
m_serr(_serr)
{}
/// Parses the command-line arguments and fills out the internal CommandLineOptions structure.
/// Performs validation and prints error messages.
/// @return true if there were no validation errors when parsing options and the
/// CommandLineOptions structure has been fully initialized. false if there were errors - in
/// this case CommandLineOptions may be only partially filled out. May also return false if
/// there is not further processing necessary and the program should just exit.
bool parse(int _argc, char const* const* _argv);
/// @throws CommandLineValidationError if the arguments cannot be properly parsed or are invalid.
/// When an exception is thrown, the @p CommandLineOptions may be only partially filled out.
void parse(int _argc, char const* const* _argv);
CommandLineOptions const& options() const { return m_options; }
/// Returns true if the parser has written anything to any of its output streams.
bool hasOutput() const { return m_hasOutput; }
static void printHelp(std::ostream& _out) { _out << optionsDescription(); }
private:
@ -269,40 +259,32 @@ private:
/// Uses boost::program_options to parse the command-line arguments and leaves the result in @a m_args.
/// Also handles the arguments that result in information being printed followed by immediate exit.
/// @returns false if parsing fails due to syntactical errors or the arguments not matching the description.
bool parseArgs(int _argc, char const* const* _argv);
void parseArgs(int _argc, char const* const* _argv);
/// Validates parsed arguments stored in @a m_args and fills out the internal CommandLineOptions
/// structure.
/// @return false if there are any validation errors, true otherwise.
bool processArgs();
/// @throws CommandLineValidationError in case of validation errors.
void processArgs();
/// Parses the value supplied to --combined-json.
/// @return false if there are any validation errors, true otherwise.
bool parseCombinedJsonOption();
/// @throws CommandLineValidationError in case of validation errors.
void parseCombinedJsonOption();
/// Parses the names of the input files, remappings for all modes except for Standard JSON.
/// Does not check if files actually exist.
/// @return false if there are any validation errors, true otherwise.
bool parseInputPathsAndRemappings();
/// Parses the names of the input files, remappings. Does not check if the files actually exist.
/// @throws CommandLineValidationError in case of validation errors.
void parseInputPathsAndRemappings();
/// Tries to read from the file @a _input or interprets @a _input literally if that fails.
/// It then tries to parse the contents and appends to m_options.libraries.
/// @return false if there are any validation errors, true otherwise.
bool parseLibraryOption(std::string const& _input);
/// It then tries to parse the contents and appends to @a m_options.libraries.
/// @throws CommandLineValidationError in case of validation errors.
void parseLibraryOption(std::string const& _input);
bool parseOutputSelection();
void parseOutputSelection();
bool checkMutuallyExclusive(std::vector<std::string> const& _optionNames);
void checkMutuallyExclusive(std::vector<std::string> const& _optionNames);
size_t countEnabledOptions(std::vector<std::string> const& _optionNames) const;
static std::string joinOptionNames(std::vector<std::string> const& _optionNames, std::string _separator = ", ");
/// Returns the stream that should receive error output. Sets m_hasOutput to true if the
/// stream has ever been used.
std::ostream& serr();
std::ostream& m_serr;
bool m_hasOutput = false;
CommandLineOptions m_options;
/// Map of command-line arguments produced by boost::program_options.

34
solc/Exceptions.h Normal file
View File

@ -0,0 +1,34 @@
/*
This file is part of solidity.
solidity is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
solidity is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with solidity. If not, see <http://www.gnu.org/licenses/>.
*/
// SPDX-License-Identifier: GPL-3.0
/**
 * Exceptions used by the command-line interface.
 */
#pragma once
#include <liblangutil/Exceptions.h>
namespace solidity::frontend
{
/// Base class for all errors reported by the command-line interface.
struct CommandLineError: virtual util::Exception {};
/// Thrown when a step of the requested compilation/processing fails.
/// NOTE(review): exact throw sites not visible here -- confirm in CommandLineInterface.
struct CommandLineExecutionError: virtual CommandLineError {};
/// Thrown by CommandLineParser when supplied arguments are syntactically valid
/// but fail validation (e.g. "Invalid option for --metadata-hash").
struct CommandLineValidationError: virtual CommandLineError {};
/// Thrown when producing/writing the requested output fails.
/// NOTE(review): exact throw sites not visible here -- confirm in CommandLineInterface.
struct CommandLineOutputError: virtual CommandLineError {};
}

View File

@ -62,12 +62,7 @@ int main(int argc, char** argv)
{
setDefaultOrCLocale();
solidity::frontend::CommandLineInterface cli(cin, cout, cerr);
bool success =
cli.parseArguments(argc, argv) &&
cli.readInputFiles() &&
cli.processInput();
return success ? 0 : 1;
return cli.run(argc, argv) ? 0 : 1;
}
catch (smtutil::SMTLogicError const& _exception)
{

View File

@ -182,7 +182,7 @@ void ExecutionFramework::sendMessage(bytes const& _data, bool _isCreation, u256
message.kind = EVMC_CALL;
message.destination = EVMHost::convertToEVMC(m_contractAddress);
}
message.gas = m_gas.convert_to<int64_t>();
message.gas = InitialGas.convert_to<int64_t>();
evmc::result result = m_evmcHost->call(message);
@ -190,7 +190,7 @@ void ExecutionFramework::sendMessage(bytes const& _data, bool _isCreation, u256
if (_isCreation)
m_contractAddress = EVMHost::convertFromEVMC(result.create_address);
m_gasUsed = m_gas - result.gas_left;
m_gasUsed = InitialGas - result.gas_left;
m_transactionSuccessful = (result.status_code == EVMC_SUCCESS);
if (m_showMessages)
@ -216,7 +216,7 @@ void ExecutionFramework::sendEther(h160 const& _addr, u256 const& _amount)
message.value = EVMHost::convertToEVMC(_amount);
message.kind = EVMC_CALL;
message.destination = EVMHost::convertToEVMC(_addr);
message.gas = m_gas.convert_to<int64_t>();
message.gas = InitialGas.convert_to<int64_t>();
m_evmcHost->call(message);
}

View File

@ -273,6 +273,9 @@ private:
}
protected:
u256 const GasPrice = 10 * gwei;
u256 const InitialGas = 100000000;
void selectVM(evmc_capabilities _cap = evmc_capabilities::EVMC_CAPABILITY_EVM1);
void reset();
@ -302,8 +305,6 @@ protected:
bool m_transactionSuccessful = true;
util::h160 m_sender = account(0);
util::h160 m_contractAddress;
u256 const m_gasPrice = 10 * gwei;
u256 const m_gas = 100000000;
bytes m_output;
u256 m_gasUsed;
};

View File

@ -54,7 +54,7 @@ do
shift
;;
*)
matching_tests=$(find . -mindepth 1 -maxdepth 1 -type d -name "$1" | cut --characters 3- | sort)
matching_tests=$(find . -mindepth 1 -maxdepth 1 -type d -name "$1" | cut -c 3- | sort)
if [[ $matching_tests == "" ]]
then
@ -308,6 +308,61 @@ function test_solc_assembly_output
fi
}
function test_via_ir_equivalence()
{
# Verifies that compiling Solidity directly with --experimental-via-ir produces
# the same assembly and bytecode as a two-stage process that first generates
# optimized IR and then compiles that IR with --strict-assembly.
# $1 solidity_code - Solidity source to compile (passed on stdin to solc)
# $2 --optimize    - optional; enables the optimizer in all compiler runs
(( $# <= 2 )) || fail "This function accepts at most two arguments."
if [[ $2 != --optimize ]] && [[ $2 != "" ]]
then
fail "The second argument must be --optimize if present."
fi
local solidity_code="$1"
local optimize_flag="$2"
local optimizer_flags=()
[[ $optimize_flag == "" ]] || optimizer_flags+=("$optimize_flag")
# Stage 1: Solidity -> optimized Yul IR; strip the "Optimized IR:" header line.
local ir_output
ir_output=$(
echo "$solidity_code" |
msg_on_error --no-stderr "$SOLC" - --ir-optimized --debug-info location "${optimizer_flags[@]}" |
sed '/^Optimized IR:$/d'
)
# Assembly comparison: IR compiled via --strict-assembly vs. direct via-IR output.
# The sed calls drop per-run header lines so the payloads can be diffed.
local asm_output_two_stage asm_output_via_ir
asm_output_two_stage=$(
echo "$ir_output" |
msg_on_error --no-stderr "$SOLC" - --strict-assembly --asm "${optimizer_flags[@]}" |
sed '/^======= <stdin>/d' |
sed '/^Text representation:$/d'
)
asm_output_via_ir=$(
echo "$solidity_code" |
msg_on_error --no-stderr "$SOLC" - --experimental-via-ir --asm --debug-info location "${optimizer_flags[@]}" |
sed '/^======= <stdin>/d' |
sed '/^EVM assembly:$/d'
)
diff_values "$asm_output_two_stage" "$asm_output_via_ir" --ignore-space-change --ignore-blank-lines
# Bytecode comparison: same idea for the --bin output of both pipelines.
local bin_output_two_stage bin_output_via_ir
bin_output_two_stage=$(
echo "$ir_output" |
msg_on_error --no-stderr "$SOLC" - --strict-assembly --bin "${optimizer_flags[@]}" |
sed '/^======= <stdin>/d' |
sed '/^Binary representation:$/d'
)
bin_output_via_ir=$(
echo "$solidity_code" |
msg_on_error --no-stderr "$SOLC" - --experimental-via-ir --bin "${optimizer_flags[@]}" |
sed '/^======= <stdin>/d' |
sed '/^Binary:$/d'
)
diff_values "$bin_output_two_stage" "$bin_output_via_ir" --ignore-space-change --ignore-blank-lines
}
## RUN
echo "Checking that the bug list is up to date..."
@ -533,6 +588,36 @@ printTask "Testing assemble, yul, strict-assembly and optimize..."
test_solc_assembly_output "{ let x := 0 }" "{ { } }" "--strict-assembly --optimize"
)
# Compare --experimental-via-ir output against a two-stage (IR then
# strict-assembly) compilation for a smoke test and several external contracts.
# Fix: corrected typo "eqivalence" -> "equivalence" in the task description.
printTask "Testing the equivalence of --experimental-via-ir and a two-stage compilation..."
(
printTask " - Smoke test"
test_via_ir_equivalence "contract C {}"
printTask " - Smoke test (optimized)"
test_via_ir_equivalence "contract C {}" --optimize
externalContracts=(
deposit_contract.sol
FixedFeeRegistrar.sol
_stringutils/stringutils.sol
)
# Contracts listed here are only exercised with the optimizer enabled
# (presumably unoptimized compilation is infeasible for them -- TODO confirm).
requiresOptimizer=(
deposit_contract.sol
FixedFeeRegistrar.sol
)
for contractFile in "${externalContracts[@]}"
do
if ! [[ "${requiresOptimizer[*]}" =~ $contractFile ]]
then
printTask " - ${contractFile}"
test_via_ir_equivalence "$(cat "${REPO_ROOT}/test/libsolidity/semanticTests/externalContracts/${contractFile}")"
fi
printTask " - ${contractFile} (optimized)"
test_via_ir_equivalence "$(cat "${REPO_ROOT}/test/libsolidity/semanticTests/externalContracts/${contractFile}")" --optimize
done
)
printTask "Testing standard input..."
SOLTMPDIR=$(mktemp -d)

View File

@ -0,0 +1 @@
--strict-assembly --yul-dialect evm --machine ewasm --optimize --ewasm-ir

View File

@ -0,0 +1 @@
Warning: Yul is still experimental. Please use the output with care.

View File

@ -0,0 +1,4 @@
/// @use-src 0:"test.sol"
object "C" {
code { sstore(0,0) }
}

View File

@ -0,0 +1,34 @@
======= yul_to_wasm_source_location_crash/input.yul (Ewasm) =======
==========================
Translated source:
/// @use-src 0:"test.sol"
object "C" {
code {
function main()
{
let hi := i64.shl(i64.extend_i32_u(bswap32(i32.wrap_i64(0))), 32)
let y := i64.or(hi, i64.extend_i32_u(bswap32(i32.wrap_i64(i64.shr_u(0, 32)))))
i64.store(0:i32, y)
i64.store(i32.add(0:i32, 8:i32), y)
i64.store(i32.add(0:i32, 16:i32), y)
i64.store(i32.add(0:i32, 24:i32), y)
i64.store(32:i32, y)
i64.store(i32.add(32:i32, 8:i32), y)
i64.store(i32.add(32:i32, 16:i32), y)
i64.store(i32.add(32:i32, 24:i32), y)
eth.storageStore(0:i32, 32:i32)
}
function bswap16(x:i32) -> y:i32
{
y := i32.or(i32.and(i32.shl(x, 8:i32), 0xff00:i32), i32.and(i32.shr_u(x, 8:i32), 0xff:i32))
}
function bswap32(x:i32) -> y:i32
{
let hi:i32 := i32.shl(bswap16(x), 16:i32)
y := i32.or(hi, bswap16(i32.shr_u(x, 16:i32)))
}
}
}

View File

@ -28,25 +28,17 @@
set -e
if [ ! -f "$1" ]
then
echo "Usage: $0 <path to soljson.js>"
exit 1
fi
SOLJSON="$1"
REPO_ROOT="$(dirname "$0")"
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$@"
printTask "Running external tests..."
"$REPO_ROOT/externalTests/zeppelin.sh" "$SOLJSON"
"$REPO_ROOT/externalTests/gnosis.sh" "$SOLJSON"
"$REPO_ROOT/externalTests/gnosis-v2.sh" "$SOLJSON"
"$REPO_ROOT/externalTests/colony.sh" "$SOLJSON"
"$REPO_ROOT/externalTests/ens.sh" "$SOLJSON"
# Disabled temporarily as it needs to be updated to latest Truffle first.
#test_truffle Gnosis https://github.com/axic/pm-contracts.git solidity-050
"$REPO_ROOT/externalTests/zeppelin.sh" "$@"
"$REPO_ROOT/externalTests/gnosis.sh" "$@"
"$REPO_ROOT/externalTests/gnosis-v2.sh" "$@"
"$REPO_ROOT/externalTests/colony.sh" "$@"
"$REPO_ROOT/externalTests/ens.sh" "$@"

View File

@ -24,8 +24,9 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$1"
SOLJSON="$1"
verify_input "$@"
BINARY_TYPE="$1"
BINARY_PATH="$2"
function compile_fn { yarn run provision:token:contracts; }
function test_fn { yarn run test:contracts; }
@ -38,11 +39,16 @@ function colony_test
local min_optimizer_level=3
local max_optimizer_level=3
setup_solcjs "$DIR" "$SOLJSON"
local selected_optimizer_levels
selected_optimizer_levels=$(circleci_select_steps "$(seq "$min_optimizer_level" "$max_optimizer_level")")
print_optimizer_levels_or_exit "$selected_optimizer_levels"
setup_solc "$DIR" "$BINARY_TYPE" "$BINARY_PATH"
download_project "$repo" "$branch" "$DIR"
[[ $BINARY_TYPE == native ]] && replace_global_solc "$BINARY_PATH"
neutralize_package_json_hooks
force_truffle_compiler_settings "$config_file" "${DIR}/solc" "$min_optimizer_level"
force_truffle_compiler_settings "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$min_optimizer_level"
yarn
git submodule update --init
@ -52,10 +58,10 @@ function colony_test
cd ..
replace_version_pragmas
force_solc_modules "${DIR}/solc"
[[ $BINARY_TYPE == solcjs ]] && force_solc_modules "${DIR}/solc"
for level in $(seq "$min_optimizer_level" "$max_optimizer_level"); do
truffle_run_test "$config_file" "${DIR}/solc" "$level" compile_fn test_fn
for level in $selected_optimizer_levels; do
truffle_run_test "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$level" compile_fn test_fn
done
}

View File

@ -24,59 +24,64 @@ set -e
CURRENT_EVM_VERSION=london
function print_optimizer_levels_or_exit
{
# Logs the optimizer levels selected for this run; if none were selected,
# warns and exits the script cleanly (status 0).
local levels="$1"
if [[ $levels == "" ]]
then
printWarning "No steps to run. Exiting."
exit 0
fi
printLog "Selected optimizer levels: ${levels}"
}
function verify_input
{
if [ ! -f "$1" ]; then
printError "Usage: $0 <path to soljson.js>"
exit 1
local binary_type="$1"
local binary_path="$2"
(( $# == 2 )) || fail "Usage: $0 native|solcjs <path to solc or soljson.js>"
[[ $binary_type == native || $binary_type == solcjs ]] || fail "Invalid binary type: '${binary_type}'. Must be either 'native' or 'solcjs'."
[[ -f "$binary_path" ]] || fail "The compiler binary does not exist at '${binary_path}'"
}
function setup_solc
{
# Prepares the compiler under test inside $test_dir and sets the global
# SOLCVERSION to the version string reported by the compiler.
# $1 test_dir      - directory to operate in
# $2 binary_type   - "native" or "solcjs"
# $3 binary_path   - path to the solc executable or soljson.js
# $4 solcjs_branch - solc-js branch to clone (default: master); solcjs only
# $5 install_dir   - where to clone solc-js (default: solc/); solcjs only
local test_dir="$1"
local binary_type="$2"
local binary_path="$3"
local solcjs_branch="${4:-master}"
local install_dir="${5:-solc/}"
# Programmer-error guard; user-facing validation happens in verify_input.
[[ $binary_type == native || $binary_type == solcjs ]] || assertFail
cd "$test_dir"
if [[ $binary_type == solcjs ]]
then
printLog "Setting up solc-js..."
git clone --depth 1 -b "$solcjs_branch" https://github.com/ethereum/solc-js.git "$install_dir"
pushd "$install_dir"
npm install
# solc-js loads the compiler from a file named soljson.js in its root dir.
cp "$binary_path" soljson.js
SOLCVERSION=$(./solcjs --version)
popd
else
printLog "Setting up solc..."
# Extract the version from the "Version: ..." line of `solc --version`.
SOLCVERSION=$("$binary_path" --version | tail -n 1 | sed -n -E 's/^Version: (.*)$/\1/p')
fi
}
function verify_version_input
{
# Validates CLI arguments: $1 must be an existing soljson.js file and
# $2 a non-empty version string; otherwise prints usage and exits.
if [[ -z "$1" || ! -f "$1" || -z "$2" ]]
then
printError "Usage: $0 <path to soljson.js> <version>"
exit 1
fi
}
function setup
{
local soljson="$1"
local branch="$2"
setup_solcjs "$DIR" "$soljson" "$branch" "solc"
cd solc
}
function setup_solcjs
{
local dir="$1"
local soljson="$2"
local branch="${3:-master}"
local path="${4:-solc/}"
cd "$dir"
printLog "Setting up solc-js..."
git clone --depth 1 -b "$branch" https://github.com/ethereum/solc-js.git "$path"
cd "$path"
npm install
cp "$soljson" soljson.js
SOLCVERSION=$(./solcjs --version)
printLog "Using solcjs version $SOLCVERSION"
cd ..
SOLCVERSION_SHORT=$(echo "$SOLCVERSION" | sed -En 's/^([0-9.]+).*\+commit\.[0-9a-f]+.*$/\1/p')
printLog "Using compiler version $SOLCVERSION"
}
function download_project
{
local repo="$1"
local branch="$2"
local dir="$3"
local solcjs_branch="$2"
local test_dir="$3"
printLog "Cloning $branch of $repo..."
git clone --depth 1 "$repo" -b "$branch" "$dir/ext"
printLog "Cloning $solcjs_branch of $repo..."
git clone --depth 1 "$repo" -b "$solcjs_branch" "$test_dir/ext"
cd ext
echo "Current commit hash: $(git rev-parse HEAD)"
}
@ -134,13 +139,19 @@ function force_solc_modules
function force_truffle_compiler_settings
{
local config_file="$1"
local solc_path="$2"
local level="$3"
local evm_version="${4:-"$CURRENT_EVM_VERSION"}"
local binary_type="$2"
local solc_path="$3"
local level="$4"
local evm_version="${5:-"$CURRENT_EVM_VERSION"}"
[[ $binary_type == native || $binary_type == solcjs ]] || assertFail
[[ $binary_type == native ]] && local solc_path="native"
printLog "Forcing Truffle compiler settings..."
echo "-------------------------------------"
echo "Config file: $config_file"
echo "Binary type: $binary_type"
echo "Compiler path: $solc_path"
echo "Optimization level: $level"
echo "Optimizer settings: $(optimizer_settings_for_level "$level")"
@ -152,6 +163,42 @@ function force_truffle_compiler_settings
echo "module.exports['compilers'] = $(truffle_compiler_settings "$solc_path" "$level" "$evm_version");" >> "$config_file"
}
function force_hardhat_compiler_binary
{
# Appends a Hardhat subtask override to $config_file so that Hardhat uses
# the given local compiler binary instead of downloading one.
# $1 config_file - Hardhat config file to append to
# $2 binary_type - "native" or "solcjs"
# $3 solc_path   - path to the compiler binary
local config_file="$1"
local binary_type="$2"
local solc_path="$3"
printLog "Configuring Hardhat..."
echo "-------------------------------------"
echo "Config file: ${config_file}"
echo "Binary type: ${binary_type}"
echo "Compiler path: ${solc_path}"
# Relies on SOLCVERSION/SOLCVERSION_SHORT having been set by setup_solc.
hardhat_solc_build_subtask "$SOLCVERSION_SHORT" "$SOLCVERSION" "$binary_type" "$solc_path" >> "$config_file"
}
function force_hardhat_compiler_settings
{
# Appends compiler settings (version, optimizer level, EVM version) to the
# project's Hardhat config as `module.exports["solidity"]`.
# $1 config_file - Hardhat config file to append to
# $2 level       - optimizer level (see optimizer_settings_for_level)
# $3 evm_version - EVM version (default: $CURRENT_EVM_VERSION)
local config_file="$1"
local level="$2"
local evm_version="${3:-"$CURRENT_EVM_VERSION"}"
printLog "Configuring Hardhat..."
echo "-------------------------------------"
echo "Config file: ${config_file}"
echo "Optimization level: ${level}"
echo "Optimizer settings: $(optimizer_settings_for_level "$level")"
echo "EVM version: ${evm_version}"
echo "Compiler version: ${SOLCVERSION_SHORT}"
echo "Compiler version (full): ${SOLCVERSION}"
echo "-------------------------------------"
{
echo -n 'module.exports["solidity"] = '
hardhat_compiler_settings "$SOLCVERSION_SHORT" "$level" "$evm_version"
} >> "$config_file"
}
function truffle_verify_compiler_version
{
local solc_version="$1"
@ -161,11 +208,26 @@ function truffle_verify_compiler_version
grep "$full_solc_version" --with-filename --recursive build/contracts || fail "Wrong compiler version detected."
}
function hardhat_verify_compiler_version
{
# Checks Hardhat build-info artifacts to ensure the contracts were compiled
# with the expected compiler version; calls fail otherwise.
# $1 solc_version      - short version (matched against "solcVersion")
# $2 full_solc_version - long version (matched against "solcLongVersion")
local solc_version="$1"
local full_solc_version="$2"
printLog "Verify that the correct version (${solc_version}/${full_solc_version}) of the compiler was used to compile the contracts..."
grep '"solcVersion": "'"${solc_version}"'"' --with-filename artifacts/build-info/*.json || fail "Wrong compiler version detected."
grep '"solcLongVersion": "'"${full_solc_version}"'"' --with-filename artifacts/build-info/*.json || fail "Wrong compiler version detected."
}
function truffle_clean
{
# Removes Truffle build output so consecutive runs start from a clean state.
rm -rf build/
}
function hardhat_clean
{
# Removes Hardhat build output and cache so consecutive runs start clean.
rm -rf artifacts/ cache/
}
function run_test
{
local compile_fn="$1"
@ -189,12 +251,21 @@ function optimizer_settings_for_level
2) echo "{enabled: true}" ;;
3) echo "{enabled: true, details: {yul: true}}" ;;
*)
printError "Optimizer level not found. Please define OPTIMIZER_LEVEL=[1, 2, 3]"
exit 1
fail "Optimizer level not found. Please define OPTIMIZER_LEVEL=[1, 2, 3]"
;;
esac
}
function replace_global_solc
{
# Makes the given binary available as `solc` on PATH by symlinking it into
# the current directory and prepending that directory to PATH.
local binary_location="$1"
if [[ -e solc ]]
then
fail "A file named 'solc' already exists in '${PWD}'."
fi
ln -s "$binary_location" solc
export PATH="$PWD:$PATH"
}
function truffle_compiler_settings
{
local solc_path="$1"
@ -212,6 +283,45 @@ function truffle_compiler_settings
echo "}"
}
function hardhat_solc_build_subtask {
# Prints a JS snippet that overrides Hardhat's
# TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD subtask so compilation uses the
# local compiler at $solc_path. Output is meant to be appended to a config.
# $1 solc_version      - short version Hardhat is expected to request
# $2 full_solc_version - long version reported back to Hardhat
# $3 binary_type       - "native" or "solcjs"
# $4 solc_path         - path to the compiler binary
local solc_version="$1"
local full_solc_version="$2"
local binary_type="$3"
local solc_path="$4"
[[ $binary_type == native || $binary_type == solcjs ]] || assertFail
# Hardhat must be told whether the binary is a native executable or solc-js.
[[ $binary_type == native ]] && local is_solcjs=false
[[ $binary_type == solcjs ]] && local is_solcjs=true
echo "const {TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD} = require('hardhat/builtin-tasks/task-names');"
echo "const assert = require('assert');"
echo
echo "subtask(TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, async (args, hre, runSuper) => {"
echo " assert(args.solcVersion == '${solc_version}', 'Unexpected solc version: ' + args.solcVersion)"
echo " return {"
echo " compilerPath: '$(realpath "$solc_path")',"
echo " isSolcJs: ${is_solcjs},"
echo " version: args.solcVersion,"
echo " longVersion: '${full_solc_version}'"
echo " }"
echo "})"
}
function hardhat_compiler_settings {
# Prints a Hardhat `solidity` config object for the given compiler version,
# optimizer level and EVM version (consumed by force_hardhat_compiler_settings).
local solc_version="$1"
local level="$2"
local evm_version="$3"
echo "{"
echo " version: '${solc_version}',"
echo " settings: {"
echo " optimizer: $(optimizer_settings_for_level "$level"),"
echo " evmVersion: '${evm_version}'"
echo " }"
echo "}"
}
function compile_and_run_test
{
local compile_fn="$1"
@ -233,16 +343,29 @@ function compile_and_run_test
function truffle_run_test
{
local config_file="$1"
local solc_path="$2"
local optimizer_level="$3"
local compile_fn="$4"
local test_fn="$5"
local binary_type="$2"
local solc_path="$3"
local optimizer_level="$4"
local compile_fn="$5"
local test_fn="$6"
truffle_clean
force_truffle_compiler_settings "$config_file" "$solc_path" "$optimizer_level"
force_truffle_compiler_settings "$config_file" "$binary_type" "$solc_path" "$optimizer_level"
compile_and_run_test compile_fn test_fn truffle_verify_compiler_version
}
function hardhat_run_test
{
# Runs one Hardhat-based test pass: clean artifacts, force the given
# optimizer level into the config, then compile, test and verify the
# compiler version.
# $1 config_file, $2 optimizer_level,
# $3 compile_fn, $4 test_fn - names of shell functions to invoke
local config_file="$1"
local optimizer_level="$2"
local compile_fn="$3"
local test_fn="$4"
hardhat_clean
force_hardhat_compiler_settings "$config_file" "$optimizer_level"
compile_and_run_test compile_fn test_fn hardhat_verify_compiler_version
}
function external_test
{
local name="$1"
@ -252,10 +375,7 @@ function external_test
echo "==========================="
DIR=$(mktemp -d -t "ext-test-${name}-XXXXXX")
(
if [ -z "$main_fn" ]; then
printError "Test main function not defined."
exit 1
fi
[[ "$main_fn" != "" ]] || fail "Test main function not defined."
$main_fn
)
rm -rf "$DIR"

View File

@ -24,8 +24,9 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$1"
export SOLJSON="$1"
verify_input "$@"
BINARY_TYPE="$1"
BINARY_PATH="$2"
function compile_fn { npx truffle compile; }
function test_fn { npm run test; }
@ -38,22 +39,27 @@ function ens_test
local min_optimizer_level=1
local max_optimizer_level=3
setup_solcjs "$DIR" "$SOLJSON"
local selected_optimizer_levels
selected_optimizer_levels=$(circleci_select_steps "$(seq "$min_optimizer_level" "$max_optimizer_level")")
print_optimizer_levels_or_exit "$selected_optimizer_levels"
setup_solc "$DIR" "$BINARY_TYPE" "$BINARY_PATH"
download_project "$repo" "$branch" "$DIR"
[[ $BINARY_TYPE == native ]] && replace_global_solc "$BINARY_PATH"
# Use latest Truffle. Older versions crash on the output from 0.8.0.
force_truffle_version ^5.1.55
neutralize_package_lock
neutralize_package_json_hooks
force_truffle_compiler_settings "$config_file" "${DIR}/solc" "$min_optimizer_level"
force_truffle_compiler_settings "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$min_optimizer_level"
npm install
replace_version_pragmas
force_solc_modules "${DIR}/solc"
[[ $BINARY_TYPE == solcjs ]] && force_solc_modules "${DIR}/solc"
for level in $(seq "$min_optimizer_level" "$max_optimizer_level"); do
truffle_run_test "$config_file" "${DIR}/solc" "$level" compile_fn test_fn
for level in $selected_optimizer_levels; do
truffle_run_test "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$level" compile_fn test_fn
done
}

View File

@ -24,10 +24,10 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$1"
SOLJSON="$1"
verify_input "$@"
BINARY_TYPE="$1"
BINARY_PATH="$2"
function install_fn { npm install --package-lock; }
function compile_fn { npx truffle compile; }
function test_fn { npm test; }
@ -39,22 +39,27 @@ function gnosis_safe_test
local min_optimizer_level=2
local max_optimizer_level=3
setup_solcjs "$DIR" "$SOLJSON"
local selected_optimizer_levels
selected_optimizer_levels=$(circleci_select_steps "$(seq "$min_optimizer_level" "$max_optimizer_level")")
print_optimizer_levels_or_exit "$selected_optimizer_levels"
setup_solc "$DIR" "$BINARY_TYPE" "$BINARY_PATH"
download_project "$repo" "$branch" "$DIR"
[[ $BINARY_TYPE == native ]] && replace_global_solc "$BINARY_PATH"
sed -i 's|github:gnosis/mock-contract#sol_0_5_0|github:solidity-external-tests/mock-contract#master_080|g' package.json
sed -i -E 's|"@gnosis.pm/util-contracts": "[^"]+"|"@gnosis.pm/util-contracts": "github:solidity-external-tests/util-contracts#solc-7_080"|g' package.json
neutralize_package_lock
neutralize_package_json_hooks
force_truffle_compiler_settings "$config_file" "${DIR}/solc" "$min_optimizer_level"
force_truffle_compiler_settings "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$min_optimizer_level"
npm install --package-lock
replace_version_pragmas
force_solc_modules "${DIR}/solc"
[[ $BINARY_TYPE == solcjs ]] && force_solc_modules "${DIR}/solc"
for level in $(seq "$min_optimizer_level" "$max_optimizer_level"); do
truffle_run_test "$config_file" "${DIR}/solc" "$level" compile_fn test_fn
for level in $selected_optimizer_levels; do
truffle_run_test "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$level" compile_fn test_fn
done
}

View File

@ -24,8 +24,9 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$1"
SOLJSON="$1"
verify_input "$@"
BINARY_TYPE="$1"
BINARY_PATH="$2"
function compile_fn { npx truffle compile; }
function test_fn { npm test; }
@ -38,21 +39,26 @@ function gnosis_safe_test
local min_optimizer_level=2
local max_optimizer_level=3
setup_solcjs "$DIR" "$SOLJSON"
local selected_optimizer_levels
selected_optimizer_levels=$(circleci_select_steps "$(seq "$min_optimizer_level" "$max_optimizer_level")")
print_optimizer_levels_or_exit "$selected_optimizer_levels"
setup_solc "$DIR" "$BINARY_TYPE" "$BINARY_PATH"
download_project "$repo" "$branch" "$DIR"
[[ $BINARY_TYPE == native ]] && replace_global_solc "$BINARY_PATH"
sed -i 's|github:gnosis/mock-contract#sol_0_5_0|github:solidity-external-tests/mock-contract#master_080|g' package.json
neutralize_package_lock
neutralize_package_json_hooks
force_truffle_compiler_settings "$config_file" "${DIR}/solc" "$min_optimizer_level"
force_truffle_compiler_settings "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$min_optimizer_level"
npm install --package-lock
replace_version_pragmas
force_solc_modules "${DIR}/solc"
[[ $BINARY_TYPE == solcjs ]] && force_solc_modules "${DIR}/solc"
for level in $(seq "$min_optimizer_level" "$max_optimizer_level"); do
truffle_run_test "$config_file" "${DIR}/solc" "$level" compile_fn test_fn
for level in $selected_optimizer_levels; do
truffle_run_test "$config_file" "$BINARY_TYPE" "${DIR}/solc" "$level" compile_fn test_fn
done
}

View File

@ -24,10 +24,11 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_version_input "$1" "$2"
SOLJSON="$1"
VERSION="$2"
[[ $SOLJSON != "" && -f "$SOLJSON" && $VERSION != "" ]] || fail "Usage: $0 <path to soljson.js> <version>"
function compile_fn { echo "Nothing to compile."; }
function test_fn { npm test; }
@ -37,7 +38,8 @@ function solcjs_test
SOLCJS_INPUT_DIR="$TEST_DIR"/test/externalTests/solc-js
# set up solc-js on the branch specified
setup "$SOLJSON" master
setup_solc "$DIR" solcjs "$SOLJSON" master solc/
cd solc/
printLog "Updating index.js file..."
echo "require('./determinism.js');" >> test/index.js

View File

@ -24,32 +24,37 @@ set -e
source scripts/common.sh
source test/externalTests/common.sh
verify_input "$1"
SOLJSON="$1"
verify_input "$@"
BINARY_TYPE="$1"
BINARY_PATH="$2"
function compile_fn { npx truffle compile; }
function test_fn { npm run test; }
function compile_fn { npm run compile; }
function test_fn { npm test; }
function zeppelin_test
{
local repo="https://github.com/OpenZeppelin/openzeppelin-contracts.git"
local branch=master
local config_file="truffle-config.js"
local config_file="hardhat.config.js"
local min_optimizer_level=1
local max_optimizer_level=3
setup_solcjs "$DIR" "$SOLJSON"
local selected_optimizer_levels
selected_optimizer_levels=$(circleci_select_steps "$(seq "$min_optimizer_level" "$max_optimizer_level")")
print_optimizer_levels_or_exit "$selected_optimizer_levels"
setup_solc "$DIR" "$BINARY_TYPE" "$BINARY_PATH"
download_project "$repo" "$branch" "$DIR"
neutralize_package_json_hooks
force_truffle_compiler_settings "$config_file" "${DIR}/solc" "$min_optimizer_level"
force_hardhat_compiler_binary "$config_file" "$BINARY_TYPE" "$BINARY_PATH"
force_hardhat_compiler_settings "$config_file" "$min_optimizer_level"
npm install
replace_version_pragmas
force_solc_modules "${DIR}/solc"
for level in $(seq "$min_optimizer_level" "$max_optimizer_level"); do
truffle_run_test "$config_file" "${DIR}/solc" "$level" compile_fn test_fn
for level in $selected_optimizer_levels; do
hardhat_run_test "$config_file" "$level" compile_fn test_fn
done
}

View File

@ -28,6 +28,33 @@
#include <boost/test/unit_test.hpp>
using namespace std;
using namespace solidity::test;
namespace boost::test_tools::tt_detail
{
// Teach Boost.Test how to print std::optional<int> and std::nullopt_t in
// BOOST_CHECK_EQUAL failure messages (it has no default printer for them).
template<>
struct print_log_value<std::optional<int>>
{
void operator()(std::ostream& _out, std::optional<int> const& _value) const
{
// An engaged optional prints its value; an empty one prints "[nullopt]".
_out << (_value ? to_string(*_value) : "[nullopt]");
}
};
template<>
struct print_log_value<nullopt_t>
{
void operator()(std::ostream& _out, nullopt_t const&) const
{
_out << "[nullopt]";
}
};
} // namespace boost::test_tools::tt_detail
namespace solidity::langutil::test
{
@ -48,6 +75,62 @@ BOOST_AUTO_TEST_CASE(test_fail)
);
}
namespace
{
std::optional<int> toPosition(int _line, int _column, string const& _text)
{
return CharStream{_text, "source"}.translateLineColumnToPosition(
LineColumn{_line, _column}
);
}
}
BOOST_AUTO_TEST_CASE(translateLineColumnToPosition)
{
BOOST_CHECK_EQUAL(toPosition(-1, 0, "ABC"), nullopt);
BOOST_CHECK_EQUAL(toPosition(0, -1, "ABC"), nullopt);
BOOST_CHECK_EQUAL(toPosition(0, 0, ""), 0);
BOOST_CHECK_EQUAL(toPosition(1, 0, ""), nullopt);
BOOST_CHECK_EQUAL(toPosition(0, 1, ""), nullopt);
// With last line containing no LF
BOOST_CHECK_EQUAL(toPosition(0, 0, "ABC"), 0);
BOOST_CHECK_EQUAL(toPosition(0, 1, "ABC"), 1);
BOOST_CHECK_EQUAL(toPosition(0, 2, "ABC"), 2);
BOOST_CHECK_EQUAL(toPosition(0, 3, "ABC"), 3);
BOOST_CHECK_EQUAL(toPosition(0, 4, "ABC"), nullopt);
BOOST_CHECK_EQUAL(toPosition(1, 0, "ABC"), nullopt);
BOOST_CHECK_EQUAL(toPosition(0, 3, "ABC\nDEF"), 3);
BOOST_CHECK_EQUAL(toPosition(0, 4, "ABC\nDEF"), nullopt);
BOOST_CHECK_EQUAL(toPosition(1, 0, "ABC\nDEF"), 4);
BOOST_CHECK_EQUAL(toPosition(1, 1, "ABC\nDEF"), 5);
BOOST_CHECK_EQUAL(toPosition(1, 2, "ABC\nDEF"), 6);
BOOST_CHECK_EQUAL(toPosition(1, 3, "ABC\nDEF"), 7);
BOOST_CHECK_EQUAL(toPosition(1, 4, "ABC\nDEF"), nullopt);
BOOST_CHECK_EQUAL(toPosition(2, 0, "ABC\nDEF"), nullopt);
BOOST_CHECK_EQUAL(toPosition(2, 1, "ABC\nDEF"), nullopt);
// With last line containing LF
BOOST_CHECK_EQUAL(toPosition(0, 0, "ABC\nDEF\n"), 0);
BOOST_CHECK_EQUAL(toPosition(0, 1, "ABC\nDEF\n"), 1);
BOOST_CHECK_EQUAL(toPosition(0, 2, "ABC\nDEF\n"), 2);
BOOST_CHECK_EQUAL(toPosition(1, 0, "ABC\nDEF\n"), 4);
BOOST_CHECK_EQUAL(toPosition(1, 1, "ABC\nDEF\n"), 5);
BOOST_CHECK_EQUAL(toPosition(1, 2, "ABC\nDEF\n"), 6);
BOOST_CHECK_EQUAL(toPosition(1, 3, "ABC\nDEF\n"), 7);
BOOST_CHECK_EQUAL(toPosition(1, 4, "ABC\nDEF\n"), nullopt);
BOOST_CHECK_EQUAL(toPosition(2, 0, "ABC\nDEF\n"), 8);
BOOST_CHECK_EQUAL(toPosition(2, 1, "ABC\nDEF\n"), nullopt);
BOOST_CHECK_EQUAL(toPosition(2, 0, "ABC\nDEF\nGHI\n"), 8);
BOOST_CHECK_EQUAL(toPosition(2, 1, "ABC\nDEF\nGHI\n"), 9);
BOOST_CHECK_EQUAL(toPosition(2, 2, "ABC\nDEF\nGHI\n"), 10);
}
BOOST_AUTO_TEST_SUITE_END()
} // end namespaces
}

View File

@ -16,17 +16,20 @@
*/
// SPDX-License-Identifier: GPL-3.0
#include <boost/algorithm/string/replace.hpp>
#include <test/libsolidity/ASTJSONTest.h>
#include <test/Common.h>
#include <libsolutil/AnsiColorized.h>
#include <liblangutil/SourceReferenceFormatter.h>
#include <libsolidity/ast/ASTJsonConverter.h>
#include <libsolidity/interface/CompilerStack.h>
#include <libsolutil/AnsiColorized.h>
#include <libsolutil/CommonIO.h>
#include <test/Common.h>
#include <test/libsolidity/ASTJSONTest.h>
#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/predicate.hpp>
#include <boost/throw_exception.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <boost/test/unit_test.hpp>
#include <boost/throw_exception.hpp>
#include <fstream>
#include <memory>
#include <stdexcept>
@ -72,8 +75,13 @@ ASTJSONTest::ASTJSONTest(string const& _filename)
if (!boost::algorithm::ends_with(_filename, ".sol"))
BOOST_THROW_EXCEPTION(runtime_error("Invalid test contract file name: \"" + _filename + "\"."));
m_astFilename = _filename.substr(0, _filename.size() - 4) + ".json";
m_astParseOnlyFilename = _filename.substr(0, _filename.size() - 4) + "_parseOnly.json";
string_view baseName = _filename;
baseName.remove_suffix(4);
m_variants = {
TestVariant(baseName, CompilerStack::State::Parsed),
TestVariant(baseName, CompilerStack::State::AnalysisPerformed),
};
ifstream file(_filename);
if (!file)
@ -102,26 +110,13 @@ ASTJSONTest::ASTJSONTest(string const& _filename)
}
m_sources.emplace_back(sourceName.empty() ? "a" : sourceName, source);
file.close();
file.open(m_astFilename);
if (file)
for (TestVariant& variant: m_variants)
{
string line;
while (getline(file, line))
m_expectation += line + "\n";
variant.expectation = readFileAsString(variant.astFilename());
boost::replace_all(variant.expectation, "\r\n", "\n");
}
file.close();
file.open(m_astParseOnlyFilename);
if (file)
{
string line;
while (getline(file, line))
m_expectationParseOnly += line + "\n";
}
file.close();
}
TestCase::TestResult ASTJSONTest::run(ostream& _stream, string const& _linePrefix, bool const _formatted)
@ -135,97 +130,76 @@ TestCase::TestResult ASTJSONTest::run(ostream& _stream, string const& _linePrefi
sources[m_sources[i].first] = m_sources[i].second;
sourceIndices[m_sources[i].first] = static_cast<unsigned>(i + 1);
}
c.setSources(sources);
c.setEVMVersion(solidity::test::CommonOptions::get().evmVersion());
bool resultsMatch = true;
if (!c.compile(CompilerStack::State::Parsed))
for (TestVariant& variant: m_variants)
{
SourceReferenceFormatter formatter(_stream, c, _formatted, false);
formatter.printErrorInformation(c.errors());
return TestResult::FatalError;
c.reset();
c.setSources(sources);
c.setEVMVersion(solidity::test::CommonOptions::get().evmVersion());
if (!c.parseAndAnalyze(variant.stopAfter))
{
// Ignore non-fatal analysis errors, we only want to export.
if (c.state() > CompilerStack::State::Parsed)
continue;
SourceReferenceFormatter formatter(_stream, c, _formatted, false);
formatter.printErrorInformation(c.errors());
return TestResult::FatalError;
}
resultsMatch = resultsMatch && runTest(
variant,
sourceIndices,
c,
_stream,
_linePrefix,
_formatted
);
}
bool resultsMatch = runTest(
m_expectationParseOnly,
m_resultParseOnly,
sourceIndices,
c,
"parseOnly",
_stream,
_linePrefix,
_formatted
);
c.reset();
c.setSources(sources);
c.setEVMVersion(solidity::test::CommonOptions::get().evmVersion());
if (!c.parse())
{
// Empty Expectations means we expect failure
if (m_expectation.empty())
return resultsMatch ? TestResult::Success : TestResult::Failure;
SourceReferenceFormatter{_stream, c, _formatted, false}
.printErrorInformation(c.errors());
return TestResult::FatalError;
}
c.analyze();
resultsMatch = runTest(
m_expectation,
m_result,
sourceIndices,
c,
"",
_stream,
_linePrefix,
_formatted
) && resultsMatch;
return resultsMatch ? TestResult::Success : TestResult::Failure;
}
bool ASTJSONTest::runTest(
string& _expectation,
string& _result,
TestVariant& _variant,
map<string, unsigned> const& _sourceIndices,
CompilerStack& _compiler,
string const& _variation,
ostream& _stream,
string const& _linePrefix,
bool const _formatted
)
{
if (m_sources.size() > 1)
_result += "[\n";
_variant.result += "[\n";
for (size_t i = 0; i < m_sources.size(); i++)
{
ostringstream result;
ASTJsonConverter(_compiler.state(), _sourceIndices).print(result, _compiler.ast(m_sources[i].first));
_result += result.str();
_variant.result += result.str();
if (i != m_sources.size() - 1)
_result += ",";
_result += "\n";
_variant.result += ",";
_variant.result += "\n";
}
if (m_sources.size() > 1)
_result += "]\n";
_variant.result += "]\n";
replaceTagWithVersion(_expectation);
replaceTagWithVersion(_variant.expectation);
if (_expectation != _result)
if (_variant.expectation != _variant.result)
{
string nextIndentLevel = _linePrefix + " ";
AnsiColorized(_stream, _formatted, {BOLD, CYAN}) <<
_linePrefix <<
"Expected result" <<
(!_variation.empty() ? " (" + _variation + "):" : ":") <<
(!_variant.name().empty() ? " (" + _variant.name() + "):" : ":") <<
endl;
{
istringstream stream(_expectation);
istringstream stream(_variant.expectation);
string line;
while (getline(stream, line))
_stream << nextIndentLevel << line << endl;
@ -235,10 +209,10 @@ bool ASTJSONTest::runTest(
AnsiColorized(_stream, _formatted, {BOLD, CYAN}) <<
_linePrefix <<
"Obtained result" <<
(!_variation.empty() ? " (" + _variation + "):" : ":") <<
(!_variant.name().empty() ? " (" + _variant.name() + "):" : ":") <<
endl;
{
istringstream stream(_result);
istringstream stream(_variant.result);
string line;
while (getline(stream, line))
_stream << nextIndentLevel << line << endl;
@ -266,14 +240,18 @@ void ASTJSONTest::printSource(ostream& _stream, string const& _linePrefix, bool
void ASTJSONTest::printUpdatedExpectations(std::ostream&, std::string const&) const
{
updateExpectation(m_astFilename, m_result, "");
updateExpectation(m_astParseOnlyFilename, m_resultParseOnly, "parseOnly ");
for (TestVariant const& variant: m_variants)
updateExpectation(
variant.astFilename(),
variant.result,
variant.name().empty() ? "" : variant.name() + " "
);
}
void ASTJSONTest::updateExpectation(string const& _filename, string const& _expectation, string const& _variation) const
void ASTJSONTest::updateExpectation(string const& _filename, string const& _expectation, string const& _variant) const
{
ofstream file(_filename.c_str());
if (!file) BOOST_THROW_EXCEPTION(runtime_error("Cannot write " + _variation + "AST expectation to \"" + _filename + "\"."));
if (!file) BOOST_THROW_EXCEPTION(runtime_error("Cannot write " + _variant + "AST expectation to \"" + _filename + "\"."));
file.exceptions(ios::badbit);
string replacedResult = _expectation;

View File

@ -19,6 +19,7 @@
#pragma once
#include <libsolutil/AnsiColorized.h>
#include <libsolidity/interface/CompilerStack.h>
#include <test/TestCase.h>
#include <iosfwd>
@ -37,6 +38,32 @@ namespace solidity::frontend::test
class ASTJSONTest: public TestCase
{
public:
struct TestVariant
{
TestVariant(std::string_view _baseName, CompilerStack::State _stopAfter):
baseName(_baseName),
stopAfter(_stopAfter)
{}
std::string name() const
{
return stopAfter == CompilerStack::State::Parsed ? "parseOnly" : "";
}
std::string astFilename() const
{
return std::string(baseName) +
(name().empty() ? "" : "_") +
name() +
".json";
}
std::string baseName;
CompilerStack::State stopAfter;
std::string result;
std::string expectation;
};
static std::unique_ptr<TestCase> create(Config const& _config)
{
return std::make_unique<ASTJSONTest>(_config.filename);
@ -49,11 +76,9 @@ public:
void printUpdatedExpectations(std::ostream& _stream, std::string const& _linePrefix) const override;
private:
bool runTest(
std::string& _expectation,
std::string& _result,
TestVariant& _testVariant,
std::map<std::string, unsigned> const& _sourceIndices,
CompilerStack& _compiler,
std::string const& _variation,
std::ostream& _stream,
std::string const& _linePrefix = "",
bool const _formatted = false
@ -61,15 +86,12 @@ private:
void updateExpectation(
std::string const& _filename,
std::string const& _expectation,
std::string const& _variation
std::string const& _variant
) const;
std::vector<TestVariant> m_variants;
std::vector<std::pair<std::string, std::string>> m_sources;
std::string m_expectationParseOnly;
std::string m_astFilename;
std::string m_astParseOnlyFilename;
std::string m_result;
std::string m_resultParseOnly;
};
}

View File

@ -100,7 +100,7 @@ ErrorList AnalysisFramework::filterErrors(ErrorList const& _errorList, bool _inc
for (auto const& messagePrefix: m_messagesToCut)
if (currentError->comment()->find(messagePrefix) == 0)
{
SourceLocation const* location = boost::get_error_info<errinfo_sourceLocation>(*currentError);
SourceLocation const* location = currentError->sourceLocation();
// sufficient for now, but in future we might clone the error completely, including the secondary location
newError = make_shared<Error>(
currentError->errorId(),

View File

@ -407,7 +407,10 @@ TestCase::TestResult SemanticTest::runTest(
if (m_transactionSuccessful == test.call().expectations.failure)
success = false;
if (success && !checkGasCostExpectation(test, _isYulRun))
{
success = false;
m_gasCostFailure = true;
}
test.setFailure(!m_transactionSuccessful);
test.setRawBytes(bytes());
@ -562,14 +565,14 @@ bool SemanticTest::checkGasCostExpectation(TestFunctionCall& io_test, bool _comp
// We don't check gas if enforce gas cost is not active
// or test is run with abi encoder v1 only
// or gas used less than threshold for enforcing feature
// or the test has used up all available gas (test will fail anyway)
// or setting is "ir" and it's not included in expectations
// or if the called function is an isoltest builtin e.g. `smokeTest` or `storageEmpty`
if (
!m_enforceGasCost ||
(
(setting == "ir" || m_gasUsed < m_enforceGasCostMinValue || m_gasUsed >= m_gas) &&
io_test.call().expectations.gasUsed.count(setting) == 0
) ||
m_gasUsed < m_enforceGasCostMinValue ||
m_gasUsed >= InitialGas ||
(setting == "ir" && io_test.call().expectations.gasUsed.count(setting) == 0) ||
io_test.call().kind == FunctionCall::Kind::Builtin
)
return true;

View File

@ -1607,30 +1607,6 @@ BOOST_AUTO_TEST_CASE(library_call_protection)
)
}
BOOST_AUTO_TEST_CASE(library_staticcall_delegatecall)
{
char const* sourceCode = R"(
library Lib {
function x() public view returns (uint) {
return 1;
}
}
contract Test {
uint t;
function f() public returns (uint) {
t = 2;
return this.g();
}
function g() public view returns (uint) {
return Lib.x();
}
}
)";
compileAndRun(sourceCode, 0, "Lib");
compileAndRun(sourceCode, 0, "Test", bytes(), map<string, h160>{{":Lib", m_contractAddress}});
ABI_CHECK(callContractFunction("f()"), encodeArgs(1));
}
BOOST_AUTO_TEST_CASE(bytes_from_calldata_to_memory)
{
char const* sourceCode = R"(
@ -1786,49 +1762,6 @@ BOOST_AUTO_TEST_CASE(copy_from_calldata_removes_bytes_data)
);
}
BOOST_AUTO_TEST_CASE(storing_invalid_boolean)
{
char const* sourceCode = R"(
contract C {
event Ev(bool);
bool public perm;
function set() public returns(uint) {
bool tmp;
assembly {
tmp := 5
}
perm = tmp;
return 1;
}
function ret() public returns(bool) {
bool tmp;
assembly {
tmp := 5
}
return tmp;
}
function ev() public returns(uint) {
bool tmp;
assembly {
tmp := 5
}
emit Ev(tmp);
return 1;
}
}
)";
compileAndRun(sourceCode);
ABI_CHECK(callContractFunction("set()"), encodeArgs(1));
ABI_CHECK(callContractFunction("perm()"), encodeArgs(1));
ABI_CHECK(callContractFunction("ret()"), encodeArgs(1));
ABI_CHECK(callContractFunction("ev()"), encodeArgs(1));
BOOST_REQUIRE_EQUAL(numLogs(), 1);
BOOST_CHECK_EQUAL(logAddress(0), m_contractAddress);
BOOST_CHECK(logData(0) == encodeArgs(1));
BOOST_REQUIRE_EQUAL(numLogTopics(0), 1);
BOOST_CHECK_EQUAL(logTopic(0, 0), util::keccak256(string("Ev(bool)")));
}
BOOST_AUTO_TEST_CASE(struct_referencing)
{
static char const* sourceCode = R"(
@ -2059,70 +1992,6 @@ BOOST_AUTO_TEST_CASE(array_copy_storage_abi)
// ABI_CHECK(callContractFunction("f()"), encodeArgs(5));
//}
BOOST_AUTO_TEST_CASE(packed_storage_structs_delete)
{
char const* sourceCode = R"(
contract C {
struct str { uint8 a; uint16 b; uint8 c; }
uint8 x;
uint16 y;
str data;
function test() public returns (uint) {
x = 1;
y = 2;
data.a = 2;
data.b = 0xabcd;
data.c = 0xfa;
if (x != 1 || y != 2 || data.a != 2 || data.b != 0xabcd || data.c != 0xfa)
return 2;
delete y;
delete data.b;
if (x != 1 || y != 0 || data.a != 2 || data.b != 0 || data.c != 0xfa)
return 3;
delete x;
delete data;
return 1;
}
}
)";
compileAndRun(sourceCode);
ABI_CHECK(callContractFunction("test()"), encodeArgs(1));
BOOST_CHECK(storageEmpty(m_contractAddress));
}
BOOST_AUTO_TEST_CASE(invalid_enum_logged)
{
char const* sourceCode = R"(
contract C {
enum X { A, B }
event Log(X);
function test_log() public returns (uint) {
X garbled = X.A;
assembly {
garbled := 5
}
emit Log(garbled);
return 1;
}
function test_log_ok() public returns (uint) {
X x = X.A;
emit Log(x);
return 1;
}
}
)";
compileAndRun(sourceCode, 0, "C");
ABI_CHECK(callContractFunction("test_log_ok()"), encodeArgs(u256(1)));
BOOST_REQUIRE_EQUAL(numLogs(), 1);
BOOST_CHECK_EQUAL(logAddress(0), m_contractAddress);
BOOST_REQUIRE_EQUAL(numLogTopics(0), 1);
BOOST_REQUIRE_EQUAL(logTopic(0, 0), util::keccak256(string("Log(uint8)")));
BOOST_CHECK_EQUAL(h256(logData(0)), h256(u256(0)));
ABI_CHECK(callContractFunction("test_log()"), panicData(PanicCode::EnumConversionError));
}
BOOST_AUTO_TEST_CASE(evm_exceptions_in_constructor_out_of_baund)
{
char const* sourceCode = R"(
@ -2164,31 +2033,6 @@ BOOST_AUTO_TEST_CASE(failing_send)
BOOST_REQUIRE(callContractFunction("callHelper(address)", c_helperAddress) == encodeArgs(true, 20));
}
BOOST_AUTO_TEST_CASE(return_string)
{
char const* sourceCode = R"(
contract Main {
string public s;
function set(string calldata _s) external {
s = _s;
}
function get1() public returns (string memory r) {
return s;
}
function get2() public returns (string memory r) {
r = s;
}
}
)";
compileAndRun(sourceCode, 0, "Main");
string s("Julia");
bytes args = encodeArgs(u256(0x20), u256(s.length()), s);
BOOST_REQUIRE(callContractFunction("set(string)", asString(args)) == encodeArgs());
ABI_CHECK(callContractFunction("get1()"), args);
ABI_CHECK(callContractFunction("get2()"), args);
ABI_CHECK(callContractFunction("s()"), args);
}
BOOST_AUTO_TEST_CASE(return_multiple_strings_of_various_sizes)
{
char const* sourceCode = R"(
@ -2343,28 +2187,6 @@ BOOST_AUTO_TEST_CASE(return_bytes_internal)
}
}
BOOST_AUTO_TEST_CASE(memory_types_initialisation)
{
char const* sourceCode = R"(
contract Test {
mapping(uint=>uint) data;
function stat() public returns (uint[5] memory)
{
data[2] = 3; // make sure to use some memory
}
function dyn() public returns (uint[] memory) { stat(); }
function nested() public returns (uint[3][] memory) { stat(); }
function nestedStat() public returns (uint[3][7] memory) { stat(); }
}
)";
compileAndRun(sourceCode, 0, "Test");
ABI_CHECK(callContractFunction("stat()"), encodeArgs(vector<u256>(5)));
ABI_CHECK(callContractFunction("dyn()"), encodeArgs(u256(0x20), u256(0)));
ABI_CHECK(callContractFunction("nested()"), encodeArgs(u256(0x20), u256(0)));
ABI_CHECK(callContractFunction("nestedStat()"), encodeArgs(vector<u256>(3 * 7)));
}
BOOST_AUTO_TEST_CASE(calldata_struct_short)
{
char const* sourceCode = R"(
@ -2718,38 +2540,6 @@ BOOST_AUTO_TEST_CASE(nested_mixed_string_as_public_mapping_key)
), encodeArgs(u256(i - 3)));
}
BOOST_AUTO_TEST_CASE(constant_string_literal)
{
char const* sourceCode = R"(
contract Test {
bytes32 constant public b = "abcdefghijklmnopq";
string constant public x = "abefghijklmnopqabcdefghijklmnopqabcdefghijklmnopqabca";
constructor() {
string memory xx = x;
bytes32 bb = b;
}
function getB() public returns (bytes32) { return b; }
function getX() public returns (string memory) { return x; }
function getX2() public returns (string memory r) { r = x; }
function unused() public returns (uint) {
"unusedunusedunusedunusedunusedunusedunusedunusedunusedunusedunusedunused";
return 2;
}
}
)";
compileAndRun(sourceCode);
string longStr = "abefghijklmnopqabcdefghijklmnopqabcdefghijklmnopqabca";
string shortStr = "abcdefghijklmnopq";
ABI_CHECK(callContractFunction("b()"), encodeArgs(shortStr));
ABI_CHECK(callContractFunction("x()"), encodeDyn(longStr));
ABI_CHECK(callContractFunction("getB()"), encodeArgs(shortStr));
ABI_CHECK(callContractFunction("getX()"), encodeDyn(longStr));
ABI_CHECK(callContractFunction("getX2()"), encodeDyn(longStr));
ABI_CHECK(callContractFunction("unused()"), encodeArgs(2));
}
BOOST_AUTO_TEST_CASE(library_call)
{
char const* sourceCode = R"(

View File

@ -118,9 +118,10 @@ void SyntaxTest::filterObtainedErrors()
{
for (auto const& currentError: filterErrors(compiler().errors(), true))
{
int locationStart = -1, locationEnd = -1;
int locationStart = -1;
int locationEnd = -1;
string sourceName;
if (auto location = boost::get_error_info<errinfo_sourceLocation>(*currentError))
if (SourceLocation const* location = currentError->sourceLocation())
{
solAssert(location->sourceName, "");
sourceName = *location->sourceName;

View File

@ -11,6 +11,9 @@ contract C {
// compileViaYul: also
// ----
// constructor(): 1, 2, 3 ->
// gas irOptimized: 143598
// gas legacy: 183490
// gas legacyOptimized: 151938
// a(uint256): 0 -> 1
// a(uint256): 1 -> 2
// a(uint256): 2 -> 3

View File

@ -11,5 +11,8 @@ contract Creator {
// compileViaYul: also
// ----
// constructor(): 1, 2, 3, 4 ->
// gas irOptimized: 132278
// gas legacy: 176789
// gas legacyOptimized: 129585
// r() -> 4
// ch() -> 3

View File

@ -10,5 +10,8 @@ contract Test {
// compileViaYul: also
// ----
// constructor(): 7, 0x40, 78, "abcdefghijklmnopqrstuvwxyzabcdef", "ghijklmnopqrstuvwxyzabcdefghijkl", "mnopqrstuvwxyz" ->
// gas irOptimized: 291443
// gas legacy: 309842
// gas legacyOptimized: 260801
// m_x() -> 7
// m_s() -> 0x20, 78, "abcdefghijklmnopqrstuvwxyzabcdef", "ghijklmnopqrstuvwxyzabcdefghijkl", "mnopqrstuvwxyz"

View File

@ -19,5 +19,8 @@ contract Main {
// compileViaYul: also
// ----
// constructor(): "abc", true
// gas irOptimized: 112563
// gas legacy: 145838
// gas legacyOptimized: 104017
// getFlag() -> true
// getName() -> "abc"

View File

@ -12,6 +12,9 @@ contract C {
// compileViaYul: also
// ----
// constructor(): 1, 2, 3, 4 ->
// gas irOptimized: 180731
// gas legacy: 221377
// gas legacyOptimized: 177671
// a() -> 1
// b(uint256): 0 -> 2
// b(uint256): 1 -> 3

View File

@ -15,4 +15,5 @@ contract B is A {
// compileViaYul: true
// ----
// constructor() ->
// gas irOptimized: 122233
// y() -> 42

View File

@ -12,4 +12,7 @@ contract B is A {
// compileViaYul: also
// ----
// constructor() ->
// gas irOptimized: 122233
// gas legacy: 135046
// gas legacyOptimized: 116176
// y() -> 42

View File

@ -11,5 +11,7 @@ contract C {
// compileViaYul: also
// ----
// constructor(): 2, 0 ->
// gas irOptimized: 104227
// gas legacy: 117158
// i() -> 2
// k() -> 0

View File

@ -23,6 +23,9 @@ contract D is B, C {
// compileViaYul: also
// ----
// constructor(): 2, 0 ->
// gas irOptimized: 159542
// gas legacy: 170665
// gas legacyOptimized: 145396
// i() -> 2
// j() -> 2
// k() -> 1

View File

@ -14,5 +14,8 @@ contract D is C {
// compileViaYul: also
// ----
// constructor(): 2, 0 ->
// gas irOptimized: 124844
// gas legacy: 139250
// gas legacyOptimized: 119367
// i() -> 2
// k() -> 1

View File

@ -0,0 +1,24 @@
contract C {
enum X { A, B }
event Log(X);
function test_log() public returns (uint) {
X garbled = X.A;
assembly {
garbled := 5
}
emit Log(garbled);
return 1;
}
function test_log_ok() public returns (uint) {
X x = X.A;
emit Log(x);
return 1;
}
}
// ====
// compileViaYul: also
// ----
// test_log_ok() -> 1
// ~ emit Log(uint8): 0x00
// test_log() -> FAILURE, hex"4e487b71", 0x21

View File

@ -17,6 +17,8 @@ contract C {
// compileViaYul: also
// ----
// constructor() ->
// gas legacy: 249112
// gas irOptimized: 177344
// gas legacy: 250376
// gas legacyOptimized: 174522
// deposit(bytes32), 18 wei: 0x1234 ->
// ~ emit Deposit(address,bytes32,uint256) from 0xf01f7809444bd9a93a854361c6fae3f23d9e23db: #0x0fdd67305928fcac8d213d1e47bfa6165cd0b87b, #0x1234, 0x00

View File

@ -20,4 +20,4 @@ contract C {
// ----
// constructor()
// ~ emit E((uint8,int16),(uint8,int16)): #0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5, 0x00, 0x00
// gas legacy: 150662
// gas legacy: 150602

View File

@ -76,7 +76,7 @@ contract FixedFeeRegistrar is Registrar {
// compileViaYul: also
// ----
// constructor()
// gas irOptimized: 425623
// gas irOptimized: 426283
// gas legacy: 936897
// gas legacyOptimized: 490983
// reserve(string), 69 ether: 0x20, 3, "abc" ->

View File

@ -178,9 +178,9 @@ contract DepositContract is IDepositContract, ERC165 {
// compileViaYul: also
// ----
// constructor()
// gas irOptimized: 1558013
// gas legacy: 2580394
// gas legacyOptimized: 1775403
// gas irOptimized: 1558001
// gas legacy: 2436584
// gas legacyOptimized: 1776483
// supportsInterface(bytes4): 0x0 -> 0
// supportsInterface(bytes4): 0xffffffff00000000000000000000000000000000000000000000000000000000 -> false # defined to be false by ERC-165 #
// supportsInterface(bytes4): 0x01ffc9a700000000000000000000000000000000000000000000000000000000 -> true # ERC-165 id #

View File

@ -50,8 +50,8 @@ contract test {
// compileViaYul: also
// ----
// constructor()
// gas irOptimized: 1924584
// gas legacy: 2602700
// gas irOptimized: 1924392
// gas legacy: 2480887
// gas legacyOptimized: 1874490
// div(int256,int256): 3141592653589793238, 88714123 -> 35412542528203691288251815328
// gas irOptimized: 22137

View File

@ -51,7 +51,7 @@ contract test {
// ----
// constructor()
// gas irOptimized: 1778342
// gas legacy: 2356230
// gas legacy: 2250130
// gas legacyOptimized: 1746528
// div(uint256,uint256): 3141592653589793238, 88714123 -> 35412542528203691288251815328
// gas irOptimized: 22004

View File

@ -36,7 +36,7 @@ contract test {
// ----
// constructor()
// gas irOptimized: 465357
// gas legacy: 733634
// gas legacy: 672749
// gas legacyOptimized: 479606
// prb_pi() -> 3141592656369545286
// gas irOptimized: 57478

View File

@ -52,7 +52,7 @@ contract test {
// ----
// constructor()
// gas irOptimized: 702619
// gas legacy: 1188228
// gas legacy: 1130761
// gas legacyOptimized: 750416
// toSlice(string): 0x20, 11, "hello world" -> 11, 0xa0
// gas irOptimized: 22660

Some files were not shown because too many files have changed in this diff Show More