mirror of
https://github.com/ethereum/solidity
synced 2023-10-03 13:03:40 +00:00
Merge branch 'develop' into FixPrefixVersionRangesDevelop
This commit is contained in:
commit
ea9a773d57
@ -7,6 +7,10 @@
|
||||
# - ems: Emscripten
|
||||
version: 2.1
|
||||
parameters:
|
||||
ubuntu-2004-docker-image:
|
||||
type: string
|
||||
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2004-16
|
||||
default: "solbuildpackpusher/solidity-buildpack-deps@sha256:ee1def5806f40c35d583234e172ec5769bb9a08b6f5bbc713c1a2658846dbced"
|
||||
ubuntu-2204-docker-image:
|
||||
type: string
|
||||
# solbuildpackpusher/solidity-buildpack-deps:ubuntu2204-1
|
||||
@ -97,11 +101,13 @@ commands:
|
||||
parameters:
|
||||
label:
|
||||
type: string
|
||||
binary_path:
|
||||
type: string
|
||||
steps:
|
||||
- run: mkdir test-cases/
|
||||
- run: cd test-cases && ../scripts/isolate_tests.py ../test/
|
||||
- run: cd test-cases && ../scripts/bytecodecompare/prepare_report.py ../build/solc/solc --interface standard-json --report-file "../bytecode-report-<< parameters.label >>-json.txt"
|
||||
- run: cd test-cases && ../scripts/bytecodecompare/prepare_report.py ../build/solc/solc --interface cli --report-file "../bytecode-report-<< parameters.label >>-cli.txt"
|
||||
- run: cd test-cases && python3 ../scripts/isolate_tests.py ../test/
|
||||
- run: cd test-cases && python3 ../scripts/bytecodecompare/prepare_report.py << parameters.binary_path >> --interface standard-json --report-file "../bytecode-report-<< parameters.label >>-json.txt"
|
||||
- run: cd test-cases && python3 ../scripts/bytecodecompare/prepare_report.py << parameters.binary_path >> --interface cli --report-file "../bytecode-report-<< parameters.label >>-cli.txt"
|
||||
- store_artifacts:
|
||||
path: bytecode-report-<< parameters.label >>-json.txt
|
||||
- store_artifacts:
|
||||
@ -313,6 +319,27 @@ defaults:
|
||||
CXX: clang++
|
||||
MAKEFLAGS: -j 5
|
||||
|
||||
- base_ubuntu2004: &base_ubuntu2004
|
||||
docker:
|
||||
- image: << pipeline.parameters.ubuntu-2004-docker-image >>
|
||||
environment:
|
||||
TERM: xterm
|
||||
MAKEFLAGS: -j 3
|
||||
|
||||
- base_ubuntu2004_small: &base_ubuntu2004_small
|
||||
<<: *base_ubuntu2004
|
||||
resource_class: small
|
||||
environment:
|
||||
TERM: xterm
|
||||
MAKEFLAGS: -j 2
|
||||
|
||||
- base_ubuntu2004_xlarge: &base_ubuntu2004_xlarge
|
||||
<<: *base_ubuntu2004
|
||||
resource_class: xlarge
|
||||
environment:
|
||||
TERM: xterm
|
||||
MAKEFLAGS: -j 10
|
||||
|
||||
- base_ubuntu2204: &base_ubuntu2204
|
||||
docker:
|
||||
- image: << pipeline.parameters.ubuntu-2204-docker-image >>
|
||||
@ -454,7 +481,7 @@ defaults:
|
||||
requires:
|
||||
- b_ubu_force_release
|
||||
|
||||
- workflow_ubuntu2204_static: &workflow_ubuntu2204_static
|
||||
- workflow_ubuntu2004_static: &workflow_ubuntu2004_static
|
||||
<<: *workflow_trigger_on_tags
|
||||
requires:
|
||||
- b_ubu_static
|
||||
@ -518,91 +545,91 @@ defaults:
|
||||
python2: true
|
||||
|
||||
- job_native_test_ext_gnosis: &job_native_test_ext_gnosis
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_gnosis
|
||||
project: gnosis
|
||||
binary_type: native
|
||||
nodejs_version: '16.18'
|
||||
- job_native_test_ext_zeppelin: &job_native_test_ext_zeppelin
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_zeppelin
|
||||
project: zeppelin
|
||||
binary_type: native
|
||||
resource_class: large
|
||||
- job_native_test_ext_ens: &job_native_test_ext_ens
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_ens
|
||||
project: ens
|
||||
binary_type: native
|
||||
nodejs_version: '18.11'
|
||||
- job_native_test_ext_trident: &job_native_test_ext_trident
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_trident
|
||||
project: trident
|
||||
binary_type: native
|
||||
nodejs_version: '16.18'
|
||||
- job_native_test_ext_euler: &job_native_test_ext_euler
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_euler
|
||||
project: euler
|
||||
binary_type: native
|
||||
resource_class: medium
|
||||
- job_native_test_ext_yield_liquidator: &job_native_test_ext_yield_liquidator
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_yield_liquidator
|
||||
project: yield-liquidator
|
||||
binary_type: native
|
||||
- job_native_test_ext_bleeps: &job_native_test_ext_bleeps
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_bleeps
|
||||
project: bleeps
|
||||
binary_type: native
|
||||
resource_class: medium
|
||||
- job_native_test_ext_pool_together: &job_native_test_ext_pool_together
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_pool_together
|
||||
project: pool-together
|
||||
binary_type: native
|
||||
nodejs_version: '16.18'
|
||||
- job_native_test_ext_perpetual_pools: &job_native_test_ext_perpetual_pools
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_perpetual_pools
|
||||
project: perpetual-pools
|
||||
binary_type: native
|
||||
nodejs_version: '18.11'
|
||||
- job_native_test_ext_uniswap: &job_native_test_ext_uniswap
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_uniswap
|
||||
project: uniswap
|
||||
binary_type: native
|
||||
nodejs_version: '16.18'
|
||||
- job_native_test_ext_prb_math: &job_native_test_ext_prb_math
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_prb_math
|
||||
project: prb-math
|
||||
binary_type: native
|
||||
nodejs_version: '18.11'
|
||||
- job_native_test_ext_elementfi: &job_native_test_ext_elementfi
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_elementfi
|
||||
project: elementfi
|
||||
binary_type: native
|
||||
resource_class: medium
|
||||
- job_native_test_ext_brink: &job_native_test_ext_brink
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_brink
|
||||
project: brink
|
||||
binary_type: native
|
||||
nodejs_version: '18.11'
|
||||
- job_native_test_ext_chainlink: &job_native_test_ext_chainlink
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_chainlink
|
||||
project: chainlink
|
||||
binary_type: native
|
||||
nodejs_version: '16.18'
|
||||
resource_class: large # Tests run out of memory on a smaller machine
|
||||
- job_native_test_ext_gp2: &job_native_test_ext_gp2
|
||||
<<: *workflow_ubuntu2204_static
|
||||
<<: *workflow_ubuntu2004_static
|
||||
name: t_native_test_ext_gp2
|
||||
project: gp2
|
||||
binary_type: native
|
||||
@ -826,8 +853,11 @@ jobs:
|
||||
MAKEFLAGS: -j 10
|
||||
|
||||
b_ubu_static:
|
||||
# We temporarily keep building static release binaries on ubuntu 20.04
|
||||
# to avoid glibc incompatibilities.
|
||||
# See: https://github.com/ethereum/solidity/issues/13954
|
||||
# On large runs 2x faster than on medium. 3x on xlarge.
|
||||
<<: *base_ubuntu2204_xlarge
|
||||
<<: *base_ubuntu2004_xlarge
|
||||
environment:
|
||||
TERM: xterm
|
||||
MAKEFLAGS: -j 10
|
||||
@ -838,8 +868,14 @@ jobs:
|
||||
- run:
|
||||
name: strip binary
|
||||
command: strip build/solc/solc
|
||||
- store_artifacts: *artifacts_solc
|
||||
- persist_to_workspace: *artifacts_executables
|
||||
- store_artifacts:
|
||||
path: build/solc/solc
|
||||
destination: solc-static-linux
|
||||
- run: mv build/solc/solc build/solc/solc-static-linux
|
||||
- persist_to_workspace:
|
||||
root: build
|
||||
paths:
|
||||
- solc/solc-static-linux
|
||||
- gitter_notify_failure_unless_pr
|
||||
|
||||
b_ubu_codecov:
|
||||
@ -1248,7 +1284,7 @@ jobs:
|
||||
- run:
|
||||
name: External <<parameters.project>> tests (native)
|
||||
command: |
|
||||
test/externalTests/<<parameters.project>>.sh native /tmp/workspace/solc/solc
|
||||
test/externalTests/<<parameters.project>>.sh native /tmp/workspace/solc/solc-static-linux
|
||||
- store_artifacts:
|
||||
path: reports/externalTests/
|
||||
# persist_to_workspace fails if the directory does not exist and the test script will create
|
||||
@ -1389,6 +1425,23 @@ jobs:
|
||||
- store_artifacts: *artifacts_test_results
|
||||
- gitter_notify_failure_unless_pr
|
||||
|
||||
# Note: b_bytecode_ubu_static is required because b_ubu_static and b_ubu
|
||||
# are currently built on different Ubuntu base images.
|
||||
# It can be safely removed once we move both to the same Ubuntu version.
|
||||
b_bytecode_ubu_static:
|
||||
<<: *base_ubuntu2004_small
|
||||
environment:
|
||||
TERM: xterm
|
||||
MAKEFLAGS: -j 2
|
||||
LC_ALL: C
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: build
|
||||
- prepare_bytecode_report:
|
||||
label: "ubuntu2004-static"
|
||||
binary_path: "../build/solc/solc-static-linux"
|
||||
|
||||
b_bytecode_ubu:
|
||||
<<: *base_ubuntu2204_small
|
||||
environment:
|
||||
@ -1401,6 +1454,7 @@ jobs:
|
||||
at: build
|
||||
- prepare_bytecode_report:
|
||||
label: "ubuntu"
|
||||
binary_path: "../build/solc/solc"
|
||||
|
||||
b_bytecode_osx:
|
||||
<<: *base_osx
|
||||
@ -1414,6 +1468,7 @@ jobs:
|
||||
at: .
|
||||
- prepare_bytecode_report:
|
||||
label: "osx"
|
||||
binary_path: "../build/solc/solc"
|
||||
|
||||
b_bytecode_win:
|
||||
<<: *base_win_bash
|
||||
@ -1424,22 +1479,13 @@ jobs:
|
||||
# platforms so line ending conversions must absolutely be disabled.
|
||||
- run: git config --global core.autocrlf false
|
||||
- checkout
|
||||
# Ensure windows has python3 alias required by prepare_bytecode_report
|
||||
- run: ln -s /c/tools/miniconda3/python /c/tools/miniconda3/python3
|
||||
- attach_workspace:
|
||||
at: build
|
||||
- run: mkdir test-cases/
|
||||
- run: cd test-cases/ && python ../scripts/isolate_tests.py ../test/
|
||||
- run: cd test-cases/ && python ../scripts/bytecodecompare/prepare_report.py ../build/solc/Release/solc.exe --interface standard-json --report-file ../bytecode-report-windows-json.txt
|
||||
- run: cd test-cases/ && python ../scripts/bytecodecompare/prepare_report.py ../build/solc/Release/solc.exe --interface cli --report-file ../bytecode-report-windows-cli.txt
|
||||
- store_artifacts:
|
||||
path: bytecode-report-windows-json.txt
|
||||
- store_artifacts:
|
||||
path: bytecode-report-windows-cli.txt
|
||||
- persist_to_workspace:
|
||||
root: .
|
||||
paths:
|
||||
- bytecode-report-windows-json.txt
|
||||
- bytecode-report-windows-cli.txt
|
||||
- gitter_notify_failure_unless_pr
|
||||
- prepare_bytecode_report:
|
||||
label: "windows"
|
||||
binary_path: "../build/solc/Release/solc.exe"
|
||||
|
||||
b_bytecode_ems:
|
||||
<<: *base_node_small
|
||||
@ -1464,6 +1510,8 @@ jobs:
|
||||
environment:
|
||||
REPORT_FILES: |
|
||||
bytecode-report-emscripten.txt
|
||||
bytecode-report-ubuntu2004-static-json.txt
|
||||
bytecode-report-ubuntu2004-static-cli.txt
|
||||
bytecode-report-ubuntu-json.txt
|
||||
bytecode-report-ubuntu-cli.txt
|
||||
bytecode-report-osx-json.txt
|
||||
@ -1506,10 +1554,10 @@ jobs:
|
||||
name: Gather and rename binaries from dependent jobs
|
||||
command: |
|
||||
mkdir github/
|
||||
cp workspace/solc/solc github/solc-static-linux
|
||||
cp workspace/build/solc/solc github/solc-macos
|
||||
cp workspace/solc/Release/solc.exe github/solc-windows.exe
|
||||
cp workspace/soljson.js github/soljson.js
|
||||
cp workspace/solc/solc-static-linux github/solc-static-linux
|
||||
cp workspace/build/solc/solc github/solc-macos
|
||||
cp workspace/solc/Release/solc.exe github/solc-windows.exe
|
||||
cp workspace/soljson.js github/soljson.js
|
||||
|
||||
cd github/
|
||||
tar --create --file ../github-binaries.tar *
|
||||
@ -1635,6 +1683,10 @@ workflows:
|
||||
- t_win_soltest: *workflow_win
|
||||
|
||||
# Bytecode comparison:
|
||||
- b_bytecode_ubu_static:
|
||||
<<: *workflow_trigger_on_tags
|
||||
requires:
|
||||
- b_ubu_static
|
||||
- b_bytecode_ubu:
|
||||
<<: *workflow_trigger_on_tags
|
||||
requires:
|
||||
@ -1654,6 +1706,7 @@ workflows:
|
||||
- t_bytecode_compare:
|
||||
<<: *workflow_trigger_on_tags
|
||||
requires:
|
||||
- b_bytecode_ubu_static
|
||||
- b_bytecode_ubu
|
||||
- b_bytecode_win
|
||||
- b_bytecode_osx
|
||||
|
@ -9,6 +9,7 @@ Compiler Features:
|
||||
|
||||
Bugfixes:
|
||||
* Code Generator: Avoid including references to the deployed label of referenced functions if they are called right away.
|
||||
* Assembler: Avoid duplicating subassembly bytecode where possible.
|
||||
* ContractLevelChecker: Properly distinguish the case of missing base constructor arguments from having an unimplemented base function.
|
||||
* SMTChecker: Fix internal error when using the custom NatSpec annotation to abstract free functions.
|
||||
* TypeChecker: Also allow external library functions in ``using for``.
|
||||
|
@ -429,7 +429,7 @@ in Visual Studio 2019 Build Tools or Visual Studio 2019:
|
||||
* C++/CLI support
|
||||
|
||||
.. _Visual Studio 2019: https://www.visualstudio.com/vs/
|
||||
.. _Visual Studio 2019 Build Tools: https://www.visualstudio.com/downloads/#build-tools-for-visual-studio-2019
|
||||
.. _Visual Studio 2019 Build Tools: https://visualstudio.microsoft.com/vs/older-downloads/#visual-studio-2019-and-other-products
|
||||
|
||||
We have a helper script which you can use to install all required external dependencies:
|
||||
|
||||
|
@ -300,9 +300,9 @@ and then they will be executed and distributed among all participating nodes.
|
||||
If two transactions contradict each other, the one that ends up being second will
|
||||
be rejected and not become part of the block.
|
||||
|
||||
These blocks form a linear sequence in time and that is where the word "blockchain"
|
||||
derives from. Blocks are added to the chain in rather regular intervals - for
|
||||
Ethereum this is roughly every 17 seconds.
|
||||
These blocks form a linear sequence in time, and that is where the word "blockchain" derives from.
|
||||
Blocks are added to the chain at regular intervals, although these intervals may be subject to change in the future.
|
||||
For the most up-to-date information, it is recommended to monitor the network, for example, on `Etherscan <https://etherscan.io/chart/blocktime>`_.
|
||||
|
||||
As part of the "order selection mechanism" (which is called "mining") it may happen that
|
||||
blocks are reverted from time to time, but only at the "tip" of the chain. The more
|
||||
|
@ -85,7 +85,6 @@ It can be compiled using ``solc --strict-assembly``. The builtin functions
|
||||
|
||||
It is also possible to implement the same function using a for-loop
|
||||
instead of with recursion. Here, ``lt(a, b)`` computes whether ``a`` is less than ``b``.
|
||||
less-than comparison.
|
||||
|
||||
.. code-block:: yul
|
||||
|
||||
|
@ -679,13 +679,25 @@ LinkerObject const& Assembly::assemble() const
|
||||
// Append an INVALID here to help tests find miscompilation.
|
||||
ret.bytecode.push_back(static_cast<uint8_t>(Instruction::INVALID));
|
||||
|
||||
map<LinkerObject, size_t> subAssemblyOffsets;
|
||||
for (auto const& [subIdPath, bytecodeOffset]: subRef)
|
||||
{
|
||||
LinkerObject subObject = subAssemblyById(subIdPath)->assemble();
|
||||
bytesRef r(ret.bytecode.data() + bytecodeOffset, bytesPerDataRef);
|
||||
toBigEndian(ret.bytecode.size(), r);
|
||||
ret.append(subAssemblyById(subIdPath)->assemble());
|
||||
}
|
||||
|
||||
// In order for de-duplication to kick in, not only must the bytecode be identical, but
|
||||
// link and immutables references as well.
|
||||
if (size_t* subAssemblyOffset = util::valueOrNullptr(subAssemblyOffsets, subObject))
|
||||
toBigEndian(*subAssemblyOffset, r);
|
||||
else
|
||||
{
|
||||
toBigEndian(ret.bytecode.size(), r);
|
||||
subAssemblyOffsets[subObject] = ret.bytecode.size();
|
||||
ret.bytecode += subObject.bytecode;
|
||||
}
|
||||
for (auto const& ref: subObject.linkReferences)
|
||||
ret.linkReferences[ref.first + subAssemblyOffsets[subObject]] = ref.second;
|
||||
}
|
||||
for (auto const& i: tagRef)
|
||||
{
|
||||
size_t subId;
|
||||
|
@ -77,3 +77,9 @@ LinkerObject::matchLibrary(
|
||||
return &it->second;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
bool LinkerObject::operator<(LinkerObject const& _other) const
|
||||
{
|
||||
return tie(this->bytecode, this->linkReferences, this->immutableReferences) <
|
||||
tie(_other.bytecode, _other.linkReferences, _other.immutableReferences);
|
||||
}
|
||||
|
@ -72,6 +72,8 @@ struct LinkerObject
|
||||
/// of the first 18 bytes of the keccak-256 hash of @a _libraryName.
|
||||
static std::string libraryPlaceholder(std::string const& _libraryName);
|
||||
|
||||
bool operator<(LinkerObject const& _other) const;
|
||||
|
||||
private:
|
||||
static util::h160 const* matchLibrary(
|
||||
std::string const& _linkRefName,
|
||||
|
75
scripts/docker/buildpack-deps/Dockerfile.ubuntu2004
Normal file
75
scripts/docker/buildpack-deps/Dockerfile.ubuntu2004
Normal file
@ -0,0 +1,75 @@
|
||||
# vim:syntax=dockerfile
|
||||
#------------------------------------------------------------------------------
|
||||
# Dockerfile for building and testing Solidity Compiler on CI
|
||||
# Target: Ubuntu 19.04 (Disco Dingo)
|
||||
# URL: https://hub.docker.com/r/ethereum/solidity-buildpack-deps
|
||||
#
|
||||
# This file is part of solidity.
|
||||
#
|
||||
# solidity is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# solidity is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with solidity. If not, see <http://www.gnu.org/licenses/>
|
||||
#
|
||||
# (c) 2016-2019 solidity contributors.
|
||||
#------------------------------------------------------------------------------
|
||||
FROM buildpack-deps:focal AS base
|
||||
LABEL version="16"
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -ex; \
|
||||
dist=$(grep DISTRIB_CODENAME /etc/lsb-release | cut -d= -f2); \
|
||||
echo "deb http://ppa.launchpad.net/ethereum/cpp-build-deps/ubuntu $dist main" >> /etc/apt/sources.list ; \
|
||||
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 1c52189c923f6ca9 ; \
|
||||
apt-get update; \
|
||||
apt-get install -qqy --no-install-recommends \
|
||||
build-essential \
|
||||
software-properties-common \
|
||||
cmake ninja-build \
|
||||
libboost-filesystem-dev libboost-test-dev libboost-system-dev \
|
||||
libboost-program-options-dev \
|
||||
libcvc4-dev libz3-static-dev z3-static jq \
|
||||
; \
|
||||
apt-get install -qy python3-pip python3-sphinx; \
|
||||
pip3 install codecov; \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
FROM base AS libraries
|
||||
|
||||
# EVMONE
|
||||
RUN set -ex; \
|
||||
cd /usr/src; \
|
||||
git clone --branch="v0.9.1" --recurse-submodules https://github.com/ethereum/evmone.git; \
|
||||
cd evmone; \
|
||||
mkdir build; \
|
||||
cd build; \
|
||||
cmake -G Ninja -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX="/usr" ..; \
|
||||
ninja; \
|
||||
ninja install/strip; \
|
||||
rm -rf /usr/src/evmone
|
||||
|
||||
# HERA
|
||||
RUN set -ex; \
|
||||
cd /usr/src; \
|
||||
git clone --branch="v0.6.0" --depth 1 --recurse-submodules https://github.com/ewasm/hera.git; \
|
||||
cd hera; \
|
||||
mkdir build; \
|
||||
cd build; \
|
||||
cmake -G Ninja -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX="/usr" ..; \
|
||||
ninja; \
|
||||
ninja install/strip; \
|
||||
rm -rf /usr/src/hera
|
||||
|
||||
FROM base
|
||||
COPY --from=libraries /usr/lib /usr/lib
|
||||
COPY --from=libraries /usr/bin /usr/bin
|
||||
COPY --from=libraries /usr/include /usr/include
|
@ -1,25 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -eu
|
||||
|
||||
BASE_PATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 && pwd -P )"
|
||||
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake ../../../
|
||||
make soltest
|
||||
cd test/
|
||||
echo "running soltest on 'semanticTests/extracted'..."
|
||||
./soltest --color_output=false --log_level=test_suite -t semanticTests/extracted/ -- --testpath "${BASE_PATH}/../../test" --no-smt --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages --show-metadata > "${BASE_PATH}/extracted-tests.trace"
|
||||
echo "running soltest on 'semanticTests/extracted'... done"
|
||||
|
||||
cd "$BASE_PATH"
|
||||
git clone git@github.com:ethereum/solidity.git solidity-develop
|
||||
cd solidity-develop
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake ..
|
||||
make soltest
|
||||
cd test/
|
||||
echo "running soltest on 'SolidityEndToEndTest'..."
|
||||
./soltest --color_output=false --log_level=test_suite -t SolidityEndToEndTest/ -- --testpath "${BASE_PATH}/solidity-develop/test" --no-smt --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages --show-metadata > "${BASE_PATH}/endToEndExtraction-tests.trace"
|
||||
echo "running soltest on 'SolidityEndToEndTest'... done"
|
@ -1,183 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# pylint: disable=consider-using-enumerate, import-error
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import getopt
|
||||
import tempfile
|
||||
from getkey import getkey
|
||||
|
||||
|
||||
def parse_call(call):
|
||||
function = ''
|
||||
arguments = ""
|
||||
results = ""
|
||||
search = re.search(r'// (.*):(.*)\s->\s(.*)', call, re.MULTILINE | re.DOTALL)
|
||||
if search:
|
||||
function = search.group(1)
|
||||
arguments = search.group(2)
|
||||
results = search.group(3)
|
||||
if results.find("#") != -1:
|
||||
results = results[:results.find("#")]
|
||||
else:
|
||||
search = re.search(r'// (.*)(.*)\s->\s(.*)', call, re.MULTILINE | re.DOTALL)
|
||||
if search:
|
||||
function = search.group(1)
|
||||
arguments = search.group(2)
|
||||
results = search.group(3)
|
||||
if results.find("#") != -1:
|
||||
results = results[:results.find("#")]
|
||||
if function.find("wei") >= 0:
|
||||
function = function[:function.find(",")]
|
||||
return function.strip(), arguments.strip(), results.strip()
|
||||
|
||||
|
||||
def colorize(left, right, index):
|
||||
red = "\x1b[31m"
|
||||
yellow = "\x1b[33m"
|
||||
reset = "\x1b[0m"
|
||||
colors = [red, yellow]
|
||||
color = colors[index % len(colors)]
|
||||
function, _arguments, _results = parse_call(right)
|
||||
left = left.replace("compileAndRun", color + "compileAndRun" + reset)
|
||||
right = right.replace("constructor", color + "constructor" + reset)
|
||||
if function:
|
||||
left = left.replace(function, color + function + reset)
|
||||
right = right.replace(function, color + function + reset)
|
||||
if left.find(function):
|
||||
bottom = " " * (left.find(function) - 4) + right
|
||||
else:
|
||||
bottom = " " + right
|
||||
return " " + left + "\n" + bottom # " {:<90} {:<90}\n{}".format(left, right, bottom)
|
||||
|
||||
|
||||
def get_checks(content, sol_file_path):
|
||||
constructors = []
|
||||
checks = []
|
||||
for line in content.split("\n"):
|
||||
line = line.strip()
|
||||
if line.startswith("compileAndRun"):
|
||||
constructors.append(line)
|
||||
if line.startswith("ABI_CHECK") or line.startswith("BOOST_REQUIRE"):
|
||||
checks.append(line)
|
||||
with open(sol_file_path, "r", encoding='utf8') as sol_file:
|
||||
sol_constructors = []
|
||||
sol_checks = []
|
||||
inside_expectations = False
|
||||
for line in sol_file.readlines():
|
||||
if line.startswith("// constructor()"):
|
||||
sol_constructors.append(line)
|
||||
elif inside_expectations and line.startswith("// "):
|
||||
sol_checks.append(line)
|
||||
if line.startswith("// ----"):
|
||||
inside_expectations = True
|
||||
sol_file.close()
|
||||
if len(constructors) == len(sol_constructors) == 1:
|
||||
checks.insert(0, constructors[0])
|
||||
sol_checks.insert(0, sol_constructors[0])
|
||||
return checks, sol_checks
|
||||
|
||||
|
||||
def show_test(name, content, sol_file_path, current_test, test_count):
|
||||
with tempfile.NamedTemporaryFile(delete=False) as cpp_file:
|
||||
cpp_file.write(content.encode())
|
||||
cpp_file.close()
|
||||
|
||||
os.system("clear")
|
||||
print(str(current_test) + " / " + str(test_count) + " - " + name + "\n")
|
||||
diff_env = os.getenv('DIFF', "/usr/local/bin/colordiff -a -d -w -y -W 200 ")
|
||||
os.system(diff_env + " " + cpp_file.name + " " + sol_file_path)
|
||||
os.unlink(cpp_file.name)
|
||||
print("\n")
|
||||
|
||||
checks, sol_checks = get_checks(content, sol_file_path)
|
||||
|
||||
if len(checks) == len(sol_checks):
|
||||
for i in range(0, len(checks)):
|
||||
print(colorize(checks[i].strip(), sol_checks[i].strip(), i))
|
||||
else:
|
||||
print("warning: check count not matching. this should not happen!")
|
||||
|
||||
what = ""
|
||||
print("\nContinue? (ENTER) Abort? (ANY OTHER KEY)")
|
||||
while what != '\n':
|
||||
what = getkey()
|
||||
if what != '\n':
|
||||
sys.exit(0)
|
||||
print()
|
||||
|
||||
|
||||
def get_tests(e2e_path):
|
||||
tests = []
|
||||
for f in os.listdir(e2e_path):
|
||||
if f.endswith(".sol"):
|
||||
tests.append(f.replace(".sol", ""))
|
||||
return tests
|
||||
|
||||
|
||||
def process_input_file(e2e_path, input_file, interactive):
|
||||
tests = get_tests(e2e_path)
|
||||
with open(input_file, "r", encoding='utf8') as cpp_file:
|
||||
inside_test = False
|
||||
test_name = ""
|
||||
inside_extracted_test = False
|
||||
new_lines = 0
|
||||
count = 0
|
||||
test_content = ""
|
||||
for line in cpp_file.readlines():
|
||||
test = re.search(r'BOOST_AUTO_TEST_CASE\((.*)\)', line, re.M | re.I)
|
||||
if test:
|
||||
test_name = test.group(1)
|
||||
inside_test = True
|
||||
inside_extracted_test = inside_test & (test_name in tests)
|
||||
if inside_extracted_test:
|
||||
count = count + 1
|
||||
|
||||
if interactive and inside_extracted_test:
|
||||
test_content = test_content + line
|
||||
|
||||
if not inside_extracted_test:
|
||||
if line == "\n":
|
||||
new_lines = new_lines + 1
|
||||
else:
|
||||
new_lines = 0
|
||||
if not interactive and new_lines <= 1:
|
||||
sys.stdout.write(line)
|
||||
|
||||
if line == "}\n":
|
||||
if interactive and inside_extracted_test:
|
||||
show_test(test_name, test_content.strip(), e2e_path + "/" + test_name + ".sol", count, len(tests))
|
||||
test_content = ""
|
||||
inside_test = False
|
||||
cpp_file.close()
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def main(argv):
|
||||
interactive = False
|
||||
input_file = None
|
||||
try:
|
||||
opts, _args = getopt.getopt(argv, "if:")
|
||||
except getopt.GetoptError:
|
||||
print("./remove-testcases.py [-i] [-f <full path to SolidityEndToEndTest.cpp>]")
|
||||
sys.exit(1)
|
||||
|
||||
for opt, arg in opts:
|
||||
if opt == '-i':
|
||||
interactive = True
|
||||
elif opt in '-f':
|
||||
input_file = arg
|
||||
|
||||
base_path = os.path.dirname(__file__)
|
||||
|
||||
if not input_file:
|
||||
input_file = base_path + "/../../test/libsolidity/SolidityEndToEndTest.cpp"
|
||||
|
||||
e2e_path = base_path + "/../../test/libsolidity/semanticTests/extracted"
|
||||
|
||||
process_input_file(e2e_path, input_file, interactive)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv[1:])
|
@ -1,214 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# - SolidityEndToEndTest.trace was created with soltest with the following command on
|
||||
# ./soltest --color_output=false --log_level=test_suite -t SolidityEndToEndTest/ -- --no-smt
|
||||
# --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages > SolidityEndToEndTest.trace
|
||||
# - a trace of the semantic tests can be created by using
|
||||
# ./soltest --color_output=false --log_level=test_suite -t semanticTests/extracted/ -- --no-smt
|
||||
# --evmonepath /Users/alex/evmone/lib/libevmone.dylib --show-messages > semanticTests.trace
|
||||
#
|
||||
# verify-testcases.py will compare both traces. If these traces are identical, the extracted tests were
|
||||
# identical with the tests specified in SolidityEndToEndTest.cpp.
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import getopt
|
||||
import json
|
||||
|
||||
|
||||
class Trace:
|
||||
def __init__(self, kind, parameter):
|
||||
self.kind = kind
|
||||
self.parameter = parameter
|
||||
self._input = ""
|
||||
self._output = ""
|
||||
self.value = ""
|
||||
self.result = ""
|
||||
self.gas = ""
|
||||
|
||||
def get_input(self):
|
||||
return self._input
|
||||
|
||||
def set_input(self, bytecode):
|
||||
if self.kind == "create":
|
||||
# remove cbor encoded metadata from bytecode
|
||||
length = int(bytecode[-4:], 16) * 2
|
||||
self._input = bytecode[:len(bytecode) - length - 4]
|
||||
|
||||
def get_output(self):
|
||||
return self._output
|
||||
|
||||
def set_output(self, output):
|
||||
if self.kind == "create":
|
||||
# remove cbor encoded metadata from bytecode
|
||||
length = int(output[-4:], 16) * 2
|
||||
self._output = output[:len(output) - length - 4]
|
||||
|
||||
def __str__(self):
|
||||
# we ignore the used gas
|
||||
result = str(
|
||||
"kind='" + self.kind + "' parameter='" + self.parameter + "' input='" + self._input +
|
||||
"' output='" + self._output + "' value='" + self.value + "' result='" + self.result + "'"
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
class TestCase:
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.metadata = None
|
||||
self.traces = []
|
||||
|
||||
def add_trace(self, kind, parameter):
|
||||
trace = Trace(kind, parameter)
|
||||
self.traces.append(trace)
|
||||
return trace
|
||||
|
||||
|
||||
class TraceAnalyser:
|
||||
def __init__(self, file):
|
||||
self.file = file
|
||||
self.tests = {}
|
||||
self.ready = False
|
||||
|
||||
def analyse(self):
|
||||
with open(self.file, "r", encoding='utf8') as trace_file:
|
||||
trace = None
|
||||
test_case = None
|
||||
for line in trace_file.readlines():
|
||||
test = re.search(r'Entering test case "(.*)"', line, re.M | re.I)
|
||||
if test:
|
||||
test_name = test.group(1)
|
||||
test_case = TestCase(test_name)
|
||||
self.tests[test_name] = test_case
|
||||
|
||||
metadata = re.search(r'\s*metadata:\s*(.*)$', line, re.M | re.I)
|
||||
if metadata:
|
||||
test_case.metadata = json.loads(metadata.group(1))
|
||||
del test_case.metadata["sources"]
|
||||
del test_case.metadata["compiler"]["version"]
|
||||
|
||||
create = re.search(r'CREATE\s*([a-fA-F0-9]*):', line, re.M | re.I)
|
||||
if create:
|
||||
trace = test_case.add_trace("create", create.group(1))
|
||||
|
||||
call = re.search(r'CALL\s*([a-fA-F0-9]*)\s*->\s*([a-fA-F0-9]*):', line, re.M | re.I)
|
||||
if call:
|
||||
trace = test_case.add_trace("call", call.group(1)) # + "->" + call.group(2))
|
||||
|
||||
if not create and not call:
|
||||
self.parse_parameters(line, trace)
|
||||
|
||||
trace_file.close()
|
||||
|
||||
print(self.file + ":", len(self.tests), "test-cases.")
|
||||
|
||||
self.ready = True
|
||||
|
||||
@staticmethod
|
||||
def parse_parameters(line, trace):
|
||||
input_match = re.search(r'\s*in:\s*([a-fA-F0-9]*)', line, re.M | re.I)
|
||||
if input_match:
|
||||
trace.input = input_match.group(1)
|
||||
output_match = re.search(r'\s*out:\s*([a-fA-F0-9]*)', line, re.M | re.I)
|
||||
if output_match:
|
||||
trace.output = output_match.group(1)
|
||||
result_match = re.search(r'\s*result:\s*([a-fA-F0-9]*)', line, re.M | re.I)
|
||||
if result_match:
|
||||
trace.result = result_match.group(1)
|
||||
gas_used_match = re.search(r'\s*gas\sused:\s*([a-fA-F0-9]*)', line, re.M | re.I)
|
||||
if gas_used_match:
|
||||
trace.gas = gas_used_match.group(1)
|
||||
value_match = re.search(r'\s*value:\s*([a-fA-F0-9]*)', line, re.M | re.I)
|
||||
if value_match:
|
||||
trace.value = value_match.group(1)
|
||||
|
||||
def diff(self, analyser):
    """Compare this analyser's test cases against another analyser's and
    print every mismatch found.

    Only test cases present in both analysers (name intersection) are
    compared; metadata is compared via its JSON serialization, traces via
    check_traces(). Both analysers are lazily analysed on first use.

    :param analyser: the other TraceAnalyser to compare against.
    """
    if not self.ready:
        self.analyse()
    if not analyser.ready:
        analyser.analyse()

    intersection = set(self.tests.keys()) & set(analyser.tests.keys())
    mismatches = set()

    for test_name in intersection:
        left = self.tests[test_name]
        right = analyser.tests[test_name]
        # Structural comparison through a canonical JSON dump.
        if json.dumps(left.metadata) != json.dumps(right.metadata):
            mismatches.add(
                (test_name, "metadata are different: " + json.dumps(left.metadata) + " != " + json.dumps(
                    right.metadata)))
        if len(left.traces) != len(right.traces):
            mismatches.add((test_name, "trace counts are different: " + str(len(left.traces)) +
                            " != " + str(len(right.traces))))
        else:
            # Same number of traces: compare them pairwise.
            self.check_traces(test_name, left, right, mismatches)

    for mismatch in mismatches:
        print(mismatch[0])
        print(mismatch[1])

    print(len(intersection), "test-cases - ", len(mismatches), " mismatch(es)")
|
||||
|
||||
@classmethod
def check_traces(cls, test_name, left, right, mismatches):
    """Compare corresponding traces of two test cases pairwise; for each
    differing pair, add (test_name, diff_info) to `mismatches`, where
    diff_info shows both trace strings and a marker row with '|' under
    every differing character position.

    Fixes: the marker row previously only covered len(left) characters,
    so a longer right trace's extra characters were never marked; str()
    was also recomputed inside the loop for every character.
    """
    for trace_id, left_trace in enumerate(left.traces):
        right_trace = right.traces[trace_id]
        assert left_trace.kind == right_trace.kind
        # Hoist the conversions: they are loop-invariant.
        left_str = str(left_trace)
        right_str = str(right_trace)
        if left_str == right_str:
            continue
        markers = ""
        for ch in range(max(len(left_str), len(right_str))):
            if ch < len(left_str) and ch < len(right_str) and left_str[ch] == right_str[ch]:
                markers += " "
            else:
                # Differing character, or one string is shorter here.
                markers += "|"
        mismatch_info = " " + left_str + "\n"
        mismatch_info += " " + right_str + "\n"
        mismatch_info += " " + markers + "\n"
        mismatches.add((test_name, mismatch_info))
|
||||
|
||||
|
||||
def main(argv):
    """Parse -s/-e options for the two trace-file paths, verify the files
    exist, then run both TraceAnalysers and print their diff.

    Exits with status 2 on bad options, 1 on a missing trace file.
    """
    extracted_tests_trace_file = None
    end_to_end_trace_file = None
    try:
        opts, _args = getopt.getopt(argv, "s:e:")
    except getopt.GetoptError:
        print("verify-testcases.py [-s <path to semantic-trace>] [-e <path to endToEndExtraction-trace>]")
        sys.exit(2)

    for opt, arg in opts:
        # getopt yields exact option strings, so compare with ==
        # ("opt in '-s'" also matched the bare string '-').
        if opt == '-s':
            extracted_tests_trace_file = arg
        elif opt == '-e':
            end_to_end_trace_file = arg

    # Default to trace files next to this script. os.path.join avoids the
    # old `base_path + "/file"` form, which produced an absolute "/file"
    # path when dirname(__file__) was empty (script run from its own dir).
    base_path = os.path.dirname(__file__)
    if not extracted_tests_trace_file:
        extracted_tests_trace_file = os.path.join(base_path, "extracted-tests.trace")
    if not end_to_end_trace_file:
        end_to_end_trace_file = os.path.join(base_path, "endToEndExtraction-tests.trace")

    # Both files must exist; a duplicate stand-alone check of the first
    # file was dead code and has been removed.
    for trace_path in [extracted_tests_trace_file, end_to_end_trace_file]:
        if not os.path.isfile(trace_path):
            print("trace file '" + trace_path + "' not found. aborting.")
            sys.exit(1)

    semantic_trace = TraceAnalyser(extracted_tests_trace_file)
    end_to_end_trace = TraceAnalyser(end_to_end_trace_file)

    semantic_trace.diff(end_to_end_trace)


if __name__ == "__main__":
    main(sys.argv[1:])
|
@ -138,10 +138,6 @@ function zeppelin_test
|
||||
sed -i "s|describe(\('reverting initialization'\)|describe.skip(\1|g" test/proxy/Proxy.behaviour.js
|
||||
sed -i "s|it(\('does not allow remote callback'\)|it.skip(\1|g" test/security/ReentrancyGuard.test.js
|
||||
|
||||
# TODO: Remove this when https://github.com/OpenZeppelin/openzeppelin-contracts/issues/4049 gets fixed
|
||||
sed -i "s|it(\('Succeeded'\)|it.skip(\1|g" test/governance/Governor.test.js
|
||||
sed -i "s|it(\('Delay is extended to prevent last minute take-over'\)|it.skip(\1|g" test/governance/extensions/GovernorPreventLateQuorum.test.js
|
||||
|
||||
neutralize_package_json_hooks
|
||||
force_hardhat_compiler_binary "$config_file" "$BINARY_TYPE" "$BINARY_PATH"
|
||||
force_hardhat_compiler_settings "$config_file" "$(first_word "$SELECTED_PRESETS")"
|
||||
|
@ -0,0 +1,42 @@
|
||||
// Helper contract whose creation code is deliberately made large by a long
// string literal, so that contract C below can compare codesize() against
// type(A).creationCode.length. (Test fixture: code must stay unchanged.)
contract A {
    function longdata() pure external returns (bytes memory) {
        return
            "xasopca.pngaibngidak.jbtnudak.cAP.BRRSMCPJAGPD KIAJDOMHUKR,SCPID"
            "xasopca.pngaibngidak.jbtnudak.cAP.BRRSMCPJAGPD KIAJDOMHUKR,SCPID"
            "M,SEYBDXCNTKIMNJGO;DUIAQBQUEHAKMPGIDSAJCOUKANJBCUEBKNA.GIAKMV.TI"
            "AJMO<KXBANJCPGUD ABKCJIDHA NKIMAJU,EKAMHSO;PYCAKUM,L.UCA MR;KITA"
            "M,SEYBDXCNTKIMNJGO;DUIAQBQUEHAKMPGIDSAJCOUKANJBCUEBKNA.GIAKMV.TI"
            "AJMO<KXBANJCPGUD ABKCJIDHA NKIMAJU,EKAMHSO;PYCAKUM,L.UCA MR;KITA"
            " .RPOKIDAS,.CKUMT.,ORKAD ,NOKIDHA .CGKIAD OVHAMS CUAOGT DAKN OIT"
            "xasopca.pngaibngidak.jbtnudak.cAP.BRRSMCPJAGPD KIAJDOMHUKR,SCPID"
            "M,SEYBDXCNTKIMNJGO;DUIAQBQUEHAKMPGIDSAJCOUKANJBCUEBKNA.GIAKMV.TI"
            "AJMO<KXBANJCPGUD ABKCJIDHA NKIMAJU,EKAMHSO;PYCAKUM,L.UCA MR;KITA"
            "apibakrpidbacnidkacjadtnpdkylca.,jda,r.kuadc,jdlkjd',c'dj, ncg d"
            "anosumantkudkc,djntudkantuadnc,ui,c.ud,.nujdncud,j.rsch'pkl.'pih";
    }
}
|
||||
|
||||
contract C {
    constructor() {
    }

    // Returns A's creation bytecode as a byte array.
    function a() external returns (bytes memory) {
        return type(A).creationCode;
    }

    // Deploys a fresh instance of A.
    function b() external returns (A) {
        return new A();
    }

    // Sanity check: C's own code size (via assembly codesize()) must be
    // larger than A's creation code (which C embeds in order to deploy A)
    // but less than twice its length.
    function test() public view returns (bool) {
        uint x;
        assembly {
            x := codesize()
        }
        return type(A).creationCode.length < x &&
            x < 2 * type(A).creationCode.length;
    }
}
|
||||
|
||||
// ----
|
||||
// test() -> true
|
@ -17,7 +17,7 @@ contract C {
|
||||
// EVMVersion: >=byzantium
|
||||
// ----
|
||||
// constructor(), 20 wei
|
||||
// gas irOptimized: 184005
|
||||
// gas irOptimized: 179697
|
||||
// gas legacy: 294335
|
||||
// gas legacyOptimized: 173427
|
||||
// f(uint256): 20 -> 0x137aa4dfc0911524504fcd4d98501f179bc13b4a
|
||||
|
@ -0,0 +1,48 @@
|
||||
// Externally-linked library; adds 1 on top of the sum so that results
// from L1 and L2 are distinguishable in the test expectations.
library L1 {
    function add(uint256 a, uint256 b) external pure returns (uint256) {
        return a + b + 1;
    }
}
|
||||
|
||||
// Externally-linked library; adds 2 on top of the sum (cf. L1 above).
library L2 {
    function add(uint256 a, uint256 b) external pure returns (uint256) {
        return a + b + 2;
    }
}
|
||||
|
||||
// Stores L1.add(1, 2) == 4 at construction time.
contract A {
    uint256 sum;
    constructor() {
        sum = L1.add(1, 2);
    }
    function getSum() external view returns(uint256) {
        return sum;
    }
}
|
||||
|
||||
// Stores L2.add(1, 2) == 5 at construction time.
contract B {
    uint256 sum;
    constructor() {
        sum = L2.add(1, 2);
    }
    function getSum() external view returns(uint256) {
        return sum;
    }
}
|
||||
|
||||
// Deploys one A and one B (each linked against a different library) and
// exposes their stored sums, verifying that both library links resolve
// correctly within the same deployment.
contract C {
    A a = new A();
    B b = new B();
    function aSum() external view returns(uint256) {
        return a.getSum();
    }
    function bSum() external view returns(uint256) {
        return b.getSum();
    }
}
|
||||
|
||||
// ----
|
||||
// library: L1
|
||||
// library: L2
|
||||
// aSum() -> 4
|
||||
// bSum() -> 5
|
@ -249,6 +249,6 @@ object "A" {
|
||||
// invalid
|
||||
// }
|
||||
// }
|
||||
// Bytecode: 600060996045603f60866013608560016084600189600055886020558760405586606055856080558460a0558360c0558260e055816101005580610120556101406000f3fe602a6013603d6001603e600185600055846020558360405582606055816080558060a05560c06000f3fe60126001816000558060205560406000f3fefefefefefe60126001816000558060205560406000f3fefe
|
||||
// Opcodes: PUSH1 0x0 PUSH1 0x99 PUSH1 0x45 PUSH1 0x3F PUSH1 0x86 PUSH1 0x13 PUSH1 0x85 PUSH1 0x1 PUSH1 0x84 PUSH1 0x1 DUP10 PUSH1 0x0 SSTORE DUP9 PUSH1 0x20 SSTORE DUP8 PUSH1 0x40 SSTORE DUP7 PUSH1 0x60 SSTORE DUP6 PUSH1 0x80 SSTORE DUP5 PUSH1 0xA0 SSTORE DUP4 PUSH1 0xC0 SSTORE DUP3 PUSH1 0xE0 SSTORE DUP2 PUSH2 0x100 SSTORE DUP1 PUSH2 0x120 SSTORE PUSH2 0x140 PUSH1 0x0 RETURN INVALID PUSH1 0x2A PUSH1 0x13 PUSH1 0x3D PUSH1 0x1 PUSH1 0x3E PUSH1 0x1 DUP6 PUSH1 0x0 SSTORE DUP5 PUSH1 0x20 SSTORE DUP4 PUSH1 0x40 SSTORE DUP3 PUSH1 0x60 SSTORE DUP2 PUSH1 0x80 SSTORE DUP1 PUSH1 0xA0 SSTORE PUSH1 0xC0 PUSH1 0x0 RETURN INVALID PUSH1 0x12 PUSH1 0x1 DUP2 PUSH1 0x0 SSTORE DUP1 PUSH1 0x20 SSTORE PUSH1 0x40 PUSH1 0x0 RETURN INVALID INVALID INVALID INVALID INVALID INVALID PUSH1 0x12 PUSH1 0x1 DUP2 PUSH1 0x0 SSTORE DUP1 PUSH1 0x20 SSTORE PUSH1 0x40 PUSH1 0x0 RETURN INVALID INVALID
|
||||
// Bytecode: 600060976045603e60846013608360016083600189600055886020558760405586606055856080558460a0558360c0558260e055816101005580610120556101406000f3fe602a6013603d6001603d600185600055846020558360405582606055816080558060a05560c06000f3fe60126001816000558060205560406000f3fefefefe60126001816000558060205560406000f3fefe
|
||||
// Opcodes: PUSH1 0x0 PUSH1 0x97 PUSH1 0x45 PUSH1 0x3E PUSH1 0x84 PUSH1 0x13 PUSH1 0x83 PUSH1 0x1 PUSH1 0x83 PUSH1 0x1 DUP10 PUSH1 0x0 SSTORE DUP9 PUSH1 0x20 SSTORE DUP8 PUSH1 0x40 SSTORE DUP7 PUSH1 0x60 SSTORE DUP6 PUSH1 0x80 SSTORE DUP5 PUSH1 0xA0 SSTORE DUP4 PUSH1 0xC0 SSTORE DUP3 PUSH1 0xE0 SSTORE DUP2 PUSH2 0x100 SSTORE DUP1 PUSH2 0x120 SSTORE PUSH2 0x140 PUSH1 0x0 RETURN INVALID PUSH1 0x2A PUSH1 0x13 PUSH1 0x3D PUSH1 0x1 PUSH1 0x3D PUSH1 0x1 DUP6 PUSH1 0x0 SSTORE DUP5 PUSH1 0x20 SSTORE DUP4 PUSH1 0x40 SSTORE DUP3 PUSH1 0x60 SSTORE DUP2 PUSH1 0x80 SSTORE DUP1 PUSH1 0xA0 SSTORE PUSH1 0xC0 PUSH1 0x0 RETURN INVALID PUSH1 0x12 PUSH1 0x1 DUP2 PUSH1 0x0 SSTORE DUP1 PUSH1 0x20 SSTORE PUSH1 0x40 PUSH1 0x0 RETURN INVALID INVALID INVALID INVALID PUSH1 0x12 PUSH1 0x1 DUP2 PUSH1 0x0 SSTORE DUP1 PUSH1 0x20 SSTORE PUSH1 0x40 PUSH1 0x0 RETURN INVALID INVALID
|
||||
// SourceMappings: 37:15:0:-:0;68:13;97:15;128:13;158:17;192:15;224:17;258:15;291:19;328:17;361:3;358:1;351:14;381:3;377:2;370:15;401:3;397:2;390:15;421:3;417:2;410:15;442:4;437:3;430:17;464:4;459:3;452:17;486:4;481:3;474:17;508:4;503:3;496:17;530:5;525:3;518:18;553:5;548:3;541:18;574:3;571:1;564:14
|
||||
|
Loading…
Reference in New Issue
Block a user