#!/usr/bin/env python3
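"""
Generates a report listing the bytecode and metadata obtained by compiling all the *.sol files
found in the current working directory with the provided solc binary, once without and once with
optimization, and writes it to report.txt.

Illustrative invocations (option values come from commandline_parser() below; the script file
name is an assumption based on the 'prepare_report-' temporary directory prefix used later):

    ./prepare_report.py path/to/solc
    ./prepare_report.py path/to/solc --interface cli --smt-use strip-pragmas
"""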

import sys
import subprocess
import json
import re
from argparse import ArgumentParser
from dataclasses import dataclass
from enum import Enum
from glob import glob
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import List, Optional, Tuple, Union


CONTRACT_SEPARATOR_PATTERN = re.compile(
    r'^ *======= +(?:(?P<file_name>.+) *:)? *(?P<contract_name>[^:]+) +======= *$',
    re.MULTILINE
)
BYTECODE_REGEX = re.compile(r'^ *Binary: *\n(?P<bytecode>.*[0-9a-f$_]+.*)$', re.MULTILINE)
METADATA_REGEX = re.compile(r'^ *Metadata: *\n *(?P<metadata>\{.*\}) *$', re.MULTILINE)
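
# NOTE: For reference, the three CLI patterns above are meant to slice up solc's --bin/--metadata
# output, which (as implied by the regexes; values are illustrative) looks roughly like:
#
#     ======= contract.sol:C =======
#     Binary:
#     6080604052...
#     Metadata:
#     {"compiler": {"version": "..."}, ...}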


class CompilerInterface(Enum):
    CLI = 'cli'
    STANDARD_JSON = 'standard-json'


class SMTUse(Enum):
    PRESERVE = 'preserve'
    DISABLE = 'disable'
    STRIP_PRAGMAS = 'strip-pragmas'


@dataclass(frozen=True)
class ContractReport:
    contract_name: str
    file_name: Optional[Path]
    bytecode: Optional[str]
    metadata: Optional[str]


@dataclass
class FileReport:
    file_name: Path
    contract_reports: Optional[List[ContractReport]]

    def format_report(self) -> str:
        report = ""

        if self.contract_reports is None:
            return f"{self.file_name.as_posix()}: <ERROR>\n"

        for contract_report in self.contract_reports:
            bytecode = contract_report.bytecode if contract_report.bytecode is not None else '<NO BYTECODE>'
            metadata = contract_report.metadata if contract_report.metadata is not None else '<NO METADATA>'

            # NOTE: Ignoring contract_report.file_name because it should always be either the same
            # as self.file_name (for Standard JSON) or just the '<stdin>' placeholder (for CLI).
            report += f"{self.file_name.as_posix()}:{contract_report.contract_name} {bytecode}\n"
            report += f"{self.file_name.as_posix()}:{contract_report.contract_name} {metadata}\n"

        return report
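
# NOTE: Example of the lines format_report() emits (file names, contract names and values are
# illustrative):
#
#     contract.sol:C 6080604052...
#     contract.sol:C {"compiler": {"version": "..."}, ...}
#     broken.sol: <ERROR>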


def load_source(path: Union[Path, str], smt_use: SMTUse) -> str:
    # NOTE: newline='' disables newline conversion.
    # We want the file exactly as is because changing even a single byte in the source affects metadata.
    with open(path, mode='r', encoding='utf8', newline='') as source_file:
        file_content = source_file.read()

    if smt_use == SMTUse.STRIP_PRAGMAS:
        return file_content.replace('pragma experimental SMTChecker;', '', 1)

    return file_content


def clean_string(value: Optional[str]) -> Optional[str]:
    value = value.strip() if value is not None else None
    return value if value != '' else None
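
# NOTE: clean_string() examples: clean_string('  6080...  ') == '6080...', while
# clean_string('   ') and clean_string(None) both return None.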


def parse_standard_json_output(source_file_name: Path, standard_json_output: str) -> FileReport:
    decoded_json_output = json.loads(standard_json_output.strip())

    # The JSON interface still returns contract metadata in case of an internal compiler error
    # while the CLI interface does not. To make reports comparable we must force this case to be
    # detected as an error in both cases.
    internal_compiler_error = any(
        error['type'] in ['UnimplementedFeatureError', 'CompilerError', 'CodeGenerationError']
        for error in decoded_json_output.get('errors', [])
    )

    if (
        'contracts' not in decoded_json_output or
        len(decoded_json_output['contracts']) == 0 or
        all(len(file_results) == 0 for file_name, file_results in decoded_json_output['contracts'].items()) or
        internal_compiler_error
    ):
        return FileReport(file_name=source_file_name, contract_reports=None)

    file_report = FileReport(file_name=source_file_name, contract_reports=[])
    for file_name, file_results in sorted(decoded_json_output['contracts'].items()):
        for contract_name, contract_results in sorted(file_results.items()):
            assert file_report.contract_reports is not None
            file_report.contract_reports.append(ContractReport(
                contract_name=contract_name,
                file_name=Path(file_name),
                bytecode=clean_string(contract_results.get('evm', {}).get('bytecode', {}).get('object')),
                metadata=clean_string(contract_results.get('metadata')),
            ))

    return file_report
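
# NOTE: For reference, the Standard JSON output consumed above has roughly this shape; only the
# fields the parser reads are shown and the values are illustrative, not verbatim solc output:
#
#     {
#         "errors": [{"type": "CompilerError", ...}, ...],
#         "contracts": {
#             "contract.sol": {
#                 "C": {
#                     "evm": {"bytecode": {"object": "6080..."}},
#                     "metadata": "{...}"
#                 }
#             }
#         }
#     }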


def parse_cli_output(source_file_name: Path, cli_output: str) -> FileReport:
    # re.split() returns a list containing the text between pattern occurrences but also inserts the
    # content of matched groups in between. It also never omits the empty elements so the number of
    # list items is predictable (3 per match + the text before the first match).
    output_segments = re.split(CONTRACT_SEPARATOR_PATTERN, cli_output)
    assert len(output_segments) % 3 == 1

    if len(output_segments) == 1:
        return FileReport(file_name=source_file_name, contract_reports=None)

    file_report = FileReport(file_name=source_file_name, contract_reports=[])
    for file_name, contract_name, contract_output in zip(output_segments[1::3], output_segments[2::3], output_segments[3::3]):
        bytecode_match = re.search(BYTECODE_REGEX, contract_output)
        metadata_match = re.search(METADATA_REGEX, contract_output)

        assert file_report.contract_reports is not None
        file_report.contract_reports.append(ContractReport(
            contract_name=contract_name.strip(),
            file_name=Path(file_name.strip()) if file_name is not None else None,
            bytecode=clean_string(bytecode_match['bytecode'] if bytecode_match is not None else None),
            metadata=clean_string(metadata_match['metadata'] if metadata_match is not None else None),
        ))

    return file_report
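
# NOTE: Illustration of the re.split() behaviour parse_cli_output() relies on (hypothetical input):
#
#     re.split(CONTRACT_SEPARATOR_PATTERN, 'Warning: ...\n======= a.sol:A =======\nBinary:\n6080...\n')
#     -> ['Warning: ...\n', 'a.sol', 'A', '\nBinary:\n6080...\n']
#
# Each separator line contributes the file_name group (None when the "file:" prefix is absent),
# the contract_name group and the text that follows it, hence the stride-3 slicing above.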


def prepare_compiler_input( # pylint: disable=too-many-arguments
    compiler_path: Path,
    source_file_name: Path,
    optimize: bool,
    force_no_optimize_yul: bool,
    interface: CompilerInterface,
    smt_use: SMTUse,
    metadata_option_supported: bool,
) -> Tuple[List[str], str]:

    if interface == CompilerInterface.STANDARD_JSON:
        json_input: dict = {
            'language': 'Solidity',
            'sources': {
                str(source_file_name): {'content': load_source(source_file_name, smt_use)}
            },
            'settings': {
                'optimizer': {'enabled': optimize},
                'outputSelection': {'*': {'*': ['evm.bytecode.object', 'metadata']}},
            }
        }

        if smt_use == SMTUse.DISABLE:
            json_input['settings']['modelChecker'] = {'engine': 'none'}

        command_line = [str(compiler_path), '--standard-json']
        compiler_input = json.dumps(json_input)
    else:
        assert interface == CompilerInterface.CLI

        compiler_options = [str(source_file_name), '--bin']
        if metadata_option_supported:
            compiler_options.append('--metadata')
        if optimize:
            compiler_options.append('--optimize')
        elif force_no_optimize_yul:
            compiler_options.append('--no-optimize-yul')
        if smt_use == SMTUse.DISABLE:
            compiler_options += ['--model-checker-engine', 'none']

        command_line = [str(compiler_path)] + compiler_options
        compiler_input = load_source(source_file_name, smt_use)

    return (command_line, compiler_input)
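
# NOTE: Examples of the (command_line, compiler_input) pairs built above (paths, file names and
# flag combinations are illustrative):
#
#     STANDARD_JSON: (['/usr/bin/solc', '--standard-json'],
#                     '{"language": "Solidity", "sources": {...}, "settings": {...}}')
#     CLI:           (['/usr/bin/solc', 'contract.sol', '--bin', '--metadata', '--optimize'],
#                     '<contract source, with the SMTChecker pragma stripped if requested>')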


def detect_metadata_cli_option_support(compiler_path: Path) -> bool:
    process = subprocess.run(
        [str(compiler_path.absolute()), '--metadata', '-'],
        input="contract C {}",
        encoding='utf8',
        capture_output=True,
        check=False,
    )

    negative_response = "unrecognised option '--metadata'"
    if (process.returncode == 0) != (process.stderr.strip() != negative_response):
        # If the error is other than the expected one, or there's an error message despite a zero
        # exit code, don't try to guess. Just fail.
        print(
            f"Compiler exit code: {process.returncode}\n"
            f"Compiler output:\n{process.stderr}\n",
            file=sys.stderr
        )
        raise Exception("Failed to determine if the compiler supports the --metadata option.")

    return process.returncode == 0
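
# NOTE: Decision table for the check in detect_metadata_cli_option_support() above:
#
#     exit code 0 and stderr is not the "unrecognised option" message -> supported, returns True
#     non-zero exit and stderr is exactly that message                -> not supported, returns False
#     any other combination                                           -> raise, cannot tell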


def run_compiler( # pylint: disable=too-many-arguments
    compiler_path: Path,
    source_file_name: Path,
    optimize: bool,
    force_no_optimize_yul: bool,
    interface: CompilerInterface,
    smt_use: SMTUse,
    metadata_option_supported: bool,
    tmp_dir: Path,
) -> FileReport:

    if interface == CompilerInterface.STANDARD_JSON:
        (command_line, compiler_input) = prepare_compiler_input(
            compiler_path,
            Path(source_file_name.name),
            optimize,
            force_no_optimize_yul,
            interface,
            smt_use,
            metadata_option_supported,
        )

        process = subprocess.run(
            command_line,
            input=compiler_input,
            encoding='utf8',
            capture_output=True,
            check=False,
        )

        return parse_standard_json_output(Path(source_file_name), process.stdout)
    else:
        assert interface == CompilerInterface.CLI
        assert tmp_dir is not None

        (command_line, compiler_input) = prepare_compiler_input(
            compiler_path.absolute(),
            Path(source_file_name.name),
            optimize,
            force_no_optimize_yul,
            interface,
            smt_use,
            metadata_option_supported,
        )

        # Create a copy that we can use directly with the CLI interface
        modified_source_path = tmp_dir / source_file_name.name
        # NOTE: newline='' disables newline conversion.
        # We want the file exactly as is because changing even a single byte in the source affects metadata.
        with open(modified_source_path, 'w', encoding='utf8', newline='') as modified_source_file:
            modified_source_file.write(compiler_input)

        process = subprocess.run(
            command_line,
            cwd=tmp_dir,
            encoding='utf8',
            capture_output=True,
            check=False,
        )

        return parse_cli_output(Path(source_file_name), process.stdout)
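
# NOTE: run_compiler() passes only the bare file name (source_file_name.name) to the compiler and,
# for the CLI interface, runs it from a temporary directory holding a copy of the source.
# Presumably this keeps the source path recorded in the metadata independent of where the script
# is run from, so that reports produced in different environments remain comparable.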


def generate_report(
    source_file_names: List[str],
    compiler_path: Path,
    interface: CompilerInterface,
    smt_use: SMTUse,
    force_no_optimize_yul: bool
):
    metadata_option_supported = detect_metadata_cli_option_support(compiler_path)

    with open('report.txt', mode='w', encoding='utf8', newline='\n') as report_file:
        for optimize in [False, True]:
            with TemporaryDirectory(prefix='prepare_report-') as tmp_dir:
                for source_file_name in sorted(source_file_names):
                    try:
                        report = run_compiler(
                            compiler_path,
                            Path(source_file_name),
                            optimize,
                            force_no_optimize_yul,
                            interface,
                            smt_use,
                            metadata_option_supported,
                            Path(tmp_dir),
                        )
                        report_file.write(report.format_report())
                    except subprocess.CalledProcessError as exception:
                        print(
                            f"\n\nInterrupted by an exception while processing file "
                            f"'{source_file_name}' with optimize={optimize}\n\n"
                            f"COMPILER STDOUT:\n{exception.stdout}\n"
                            f"COMPILER STDERR:\n{exception.stderr}\n",
                            file=sys.stderr
                        )
                        raise
                    except:
                        print(
                            f"\n\nInterrupted by an exception while processing file "
                            f"'{source_file_name}' with optimize={optimize}\n",
                            file=sys.stderr
                        )
                        raise
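
# NOTE: The resulting report.txt therefore contains two passes over all the files, first without
# and then with optimization, each pass contributing one bytecode line and one metadata line per
# contract (or a single "<ERROR>" line for a file whose compilation fails).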


def commandline_parser() -> ArgumentParser:
    script_description = (
        "Generates a report listing bytecode and metadata obtained by compiling all the "
        "*.sol files found in the current working directory using the provided binary."
    )

    parser = ArgumentParser(description=script_description)
    parser.add_argument(dest='compiler_path', help="Solidity compiler executable")
    parser.add_argument(
        '--interface',
        dest='interface',
        default=CompilerInterface.STANDARD_JSON.value,
        choices=[c.value for c in CompilerInterface],
        help="Compiler interface to use.",
    )
    parser.add_argument(
        '--smt-use',
        dest='smt_use',
        default=SMTUse.DISABLE.value,
        choices=[s.value for s in SMTUse],
        help="What to do about contracts that use the experimental SMT checker."
    )
    parser.add_argument(
        '--force-no-optimize-yul',
        dest='force_no_optimize_yul',
        default=False,
        action='store_true',
        help="Explicitly disable the Yul optimizer in CLI runs without optimization to work around a bug in solc 0.6.0 and 0.6.1."
    )
    return parser


if __name__ == "__main__":
    options = commandline_parser().parse_args()
    generate_report(
        glob("*.sol"),
        Path(options.compiler_path),
        CompilerInterface(options.interface),
        SMTUse(options.smt_use),
        options.force_no_optimize_yul,
    )