Merge pull request #14171 from ethereum/ast-import-via-standard-json
Add support to import AST via Standard JSON.
Commit: 44a30e47ca
@ -14,6 +14,7 @@ Compiler Features:
 * Parser: Unary plus is no longer recognized as a unary operator in the AST and triggers an error at the parsing stage (rather than later during the analysis).
 * SMTChecker: Properties that are proved safe are now reported explicitly at the end of the analysis. By default, only the number of safe properties is shown. The CLI option ``--model-checker-show-proved-safe`` and the JSON option ``settings.modelChecker.showProvedSafe`` can be enabled to show the full list of safe properties.
 * SMTChecker: Group all messages about unsupported language features in a single warning. The CLI option ``--model-checker-show-unsupported`` and the JSON option ``settings.modelChecker.showUnsupported`` can be enabled to show the full list.
 * Standard JSON Interface: Add experimental support for importing ASTs via Standard JSON.
 * Yul EVM Code Transform: If available, use ``push0`` instead of ``codesize`` to produce an arbitrary value on stack in order to create equal stack heights between branches.
@ -203,7 +203,7 @@ Input Description

.. code-block:: javascript

    {
      // Required: Source code language. Currently supported are "Solidity", "Yul" and "SolidityAST" (experimental).
      "language": "Solidity",
      // Required
      "sources":

@ -230,6 +230,14 @@ Input Description

        // If files are used, their directories should be added to the command line via
        // `--allow-paths <path>`.
      ]
      // If language is set to "SolidityAST", an AST needs to be supplied under the "ast" key.
      // Note that importing ASTs is experimental and in particular that:
      // - importing invalid ASTs can produce undefined results and
      // - no proper error reporting is available on invalid ASTs.
      // Furthermore, note that the AST import only consumes the fields of the AST as
      // produced by the compiler in "stopAfter": "parsing" mode and then re-performs
      // analysis, so any analysis-based annotations of the AST are ignored upon import.
      "ast": { ... } // formatted as the json ast requested with the ``ast`` output selection.
    },
    "destructible":
    {
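For context, a minimal round-trip sketch, assuming the public `StandardCompiler` API and an illustrative sample contract (the source name "A" and the contract text are assumptions, not taken from the documentation). It first exports an AST with `"stopAfter": "parsing"` and then feeds that AST back in under the experimental `SolidityAST` language:

    #include <libsolidity/interface/StandardCompiler.h>

    #include <json/json.h>
    #include <iostream>

    int main()
    {
        solidity::frontend::StandardCompiler compiler;

        // Step 1: stop after parsing and request only the AST output.
        Json::Value exportRequest;
        exportRequest["language"] = "Solidity";
        exportRequest["sources"]["A"]["content"] =
            "pragma solidity >=0.0; contract C { function f() public pure {} }";
        exportRequest["settings"]["stopAfter"] = "parsing";
        exportRequest["settings"]["outputSelection"]["*"][""].append("ast");
        Json::Value exported = compiler.compile(exportRequest);

        // Step 2: re-import the exported AST under "SolidityAST" and ask for bytecode,
        // which re-runs analysis and code generation on the imported tree.
        Json::Value importRequest;
        importRequest["language"] = "SolidityAST";
        importRequest["sources"]["A"]["ast"] = exported["sources"]["A"]["ast"];
        importRequest["settings"]["outputSelection"]["*"]["*"].append("evm.bytecode");
        std::cout << compiler.compile(importRequest) << std::endl;
        return 0;
    }

The exported `sources.A.ast` object is exactly the shape the `"ast"` key above expects; any analysis-based annotations it might carry are discarded on import, as noted in the documentation comment.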
@ -657,72 +657,84 @@ std::variant<StandardCompiler::InputsAndSettings, Json::Value> StandardCompiler:

    ret.errors = Json::arrayValue;

    if (ret.language == "Solidity" || ret.language == "Yul")
    {
        for (auto const& sourceName: sources.getMemberNames())
        {
            string hash;

            if (auto result = checkSourceKeys(sources[sourceName], sourceName))
                return *result;

            if (sources[sourceName]["keccak256"].isString())
                hash = sources[sourceName]["keccak256"].asString();

            if (sources[sourceName]["content"].isString())
            {
                string content = sources[sourceName]["content"].asString();
                if (!hash.empty() && !hashMatchesContent(hash, content))
                    ret.errors.append(formatError(
                        Error::Type::IOError,
                        "general",
                        "Mismatch between content and supplied hash for \"" + sourceName + "\""
                    ));
                else
                    ret.sources[sourceName] = content;
            }
            else if (sources[sourceName]["urls"].isArray())
            {
                if (!m_readFile)
                    return formatFatalError(
                        Error::Type::JSONError, "No import callback supplied, but URL is requested."
                    );

                vector<string> failures;
                bool found = false;

                for (auto const& url: sources[sourceName]["urls"])
                {
                    if (!url.isString())
                        return formatFatalError(Error::Type::JSONError, "URL must be a string.");
                    ReadCallback::Result result = m_readFile(ReadCallback::kindString(ReadCallback::Kind::ReadFile), url.asString());
                    if (result.success)
                    {
                        if (!hash.empty() && !hashMatchesContent(hash, result.responseOrErrorMessage))
                            ret.errors.append(formatError(
                                Error::Type::IOError,
                                "general",
                                "Mismatch between content and supplied hash for \"" + sourceName + "\" at \"" + url.asString() + "\""
                            ));
                        else
                        {
                            ret.sources[sourceName] = result.responseOrErrorMessage;
                            found = true;
                            break;
                        }
                    }
                    else
                        failures.push_back(
                            "Cannot import url (\"" + url.asString() + "\"): " + result.responseOrErrorMessage
                        );
                }

                for (auto const& failure: failures)
                {
                    /// If the import succeeded, mark all the others as warnings; otherwise all of them are errors.
                    ret.errors.append(formatError(
                        found ? Error::Type::Warning : Error::Type::IOError,
                        "general",
                        failure
                    ));
                }
            }
            else
                return formatFatalError(Error::Type::JSONError, "Invalid input source specified.");
        }
    }
    else if (ret.language == "SolidityAST")
    {
        for (auto const& sourceName: sources.getMemberNames())
            ret.sources[sourceName] = util::jsonCompactPrint(sources[sourceName]);
    }

    Json::Value const& auxInputs = _input["auxiliaryInput"];
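The `SolidityAST` branch above stores each source's JSON verbatim as a compact string so it can flow through the same `StringMap` plumbing as ordinary source code. A small sketch, illustrative only and using the `libsolutil` helpers referenced above, of why that round trip is lossless:

    #include <libsolutil/JSON.h>

    #include <cassert>
    #include <string>

    int main()
    {
        // A tiny JSON value standing in for an AST node.
        Json::Value ast;
        ast["nodeType"] = "SourceUnit";
        ast["id"] = 1;

        // jsonCompactPrint() flattens the value into a string, exactly as the
        // "SolidityAST" branch does before storing it in ret.sources.
        std::string flattened = solidity::util::jsonCompactPrint(ast);

        // jsonParseStrict() recovers the value, which is what parseAstFromInput()
        // relies on when it later rebuilds the AST JSON from the stored string.
        Json::Value recovered;
        bool success = solidity::util::jsonParseStrict(flattened, recovered);
        assert(success);
        assert(recovered["nodeType"].asString() == "SourceUnit");
        assert(recovered["id"].asInt() == 1);
        return 0;
    }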
@ -1117,7 +1129,24 @@ std::variant<StandardCompiler::InputsAndSettings, Json::Value> StandardCompiler:
        ret.modelCheckerSettings.timeout = modelCheckerSettings["timeout"].asUInt();
    }

    return {std::move(ret)};
}

map<string, Json::Value> StandardCompiler::parseAstFromInput(StringMap const& _sources)
{
    map<string, Json::Value> sourceJsons;
    for (auto const& [sourceName, sourceCode]: _sources)
    {
        Json::Value ast;
        astAssert(util::jsonParseStrict(sourceCode, ast), "Input file could not be parsed to JSON");
        string astKey = ast.isMember("ast") ? "ast" : "AST";

        astAssert(ast.isMember(astKey), "Source does not contain the expected AST key");
        astAssert(ast[astKey]["nodeType"].asString() == "SourceUnit", "Top-level node should be a 'SourceUnit'");
        astAssert(sourceJsons.count(sourceName) == 0, "All sources must have unique names");
        sourceJsons.emplace(sourceName, std::move(ast[astKey]));
    }
    return sourceJsons;
}

Json::Value StandardCompiler::compileSolidity(StandardCompiler::InputsAndSettings _inputsAndSettings)
@ -1125,7 +1154,8 @@ Json::Value StandardCompiler::compileSolidity(StandardCompiler::InputsAndSetting
    CompilerStack compilerStack(m_readFile);

    StringMap sourceList = std::move(_inputsAndSettings.sources);
    if (_inputsAndSettings.language == "Solidity")
        compilerStack.setSources(sourceList);
    for (auto const& smtLib2Response: _inputsAndSettings.smtLib2Responses)
        compilerStack.addSMTLib2Response(smtLib2Response.first, smtLib2Response.second);
    compilerStack.setViaIR(_inputsAndSettings.viaIR);
@ -1153,23 +1183,37 @@ Json::Value StandardCompiler::compileSolidity(StandardCompiler::InputsAndSetting

    try
    {
        if (_inputsAndSettings.language == "SolidityAST")
        {
            try
            {
                compilerStack.importASTs(parseAstFromInput(sourceList));
                if (!compilerStack.analyze())
                    errors.append(formatError(Error::Type::FatalError, "general", "Analysis of the AST failed."));
                if (binariesRequested)
                    compilerStack.compile();
            }
            catch (util::Exception const& _exc)
            {
                solThrow(util::Exception, "Failed to import AST: "s + _exc.what());
            }
        }
        else
        {
            if (binariesRequested)
                compilerStack.compile();
            else
                compilerStack.parseAndAnalyze(_inputsAndSettings.stopAfter);

            for (auto const& error: compilerStack.errors())
                errors.append(formatErrorWithException(
                    compilerStack,
                    *error,
                    error->type(),
                    "general",
                    "",
                    error->errorId()
                ));
        }
    }
    /// This is only thrown in a very few locations.
@ -1558,8 +1602,10 @@ Json::Value StandardCompiler::compile(Json::Value const& _input) noexcept
        return compileSolidity(std::move(settings));
    else if (settings.language == "Yul")
        return compileYul(std::move(settings));
    else if (settings.language == "SolidityAST")
        return compileSolidity(std::move(settings));
    else
        return formatFatalError(Error::Type::JSONError, "Only \"Solidity\", \"Yul\" or \"SolidityAST\" is supported as a language.");
}
catch (Json::LogicError const& _exception)
{
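A quick way to observe this dispatch from the outside, sketched here against the public string-based API; the input mirrors the `invalid_language` test updated near the end of this diff:

    #include <libsolidity/interface/StandardCompiler.h>

    #include <iostream>
    #include <string>

    int main()
    {
        solidity::frontend::StandardCompiler compiler;

        // An unknown language is rejected by the dispatch in compile() with a JSONError.
        std::string input = R"({
            "language": "INVALID",
            "sources": { "name": { "content": "abc" } }
        })";

        // Expected output (per the updated test): a JSONError stating that only
        // "Solidity", "Yul" or "SolidityAST" is supported as a language.
        std::cout << compiler.compile(input) << std::endl;
        return 0;
    }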
@ -95,6 +95,7 @@ private:
    /// it in condensed form or an error as a json object.
    std::variant<InputsAndSettings, Json::Value> parseInput(Json::Value const& _input);

    std::map<std::string, Json::Value> parseAstFromInput(StringMap const& _sources);
    Json::Value compileSolidity(InputsAndSettings _inputsAndSettings);
    Json::Value compileYul(InputsAndSettings _inputsAndSettings);
@ -101,6 +101,7 @@ function test_ast_import_export_equivalence

    local export_command=("$SOLC" --combined-json ast --pretty-json --json-indent 4 "${input_files[@]}")
    local import_command=("$SOLC" --import-ast --combined-json ast --pretty-json --json-indent 4 expected.json)
    local import_via_standard_json_command=("$SOLC" --combined-json ast --pretty-json --json-indent 4 --standard-json standard_json_input.json)

    # export ast - save ast json as expected result (silently)
    if ! "${export_command[@]}" > expected.json 2> stderr_export.txt

@ -118,8 +119,28 @@ function test_ast_import_export_equivalence
        return 1
    fi

    echo ". += {\"sources\":" > _ast_json.json
    jq .sources expected.json >> _ast_json.json
    echo "}" >> _ast_json.json
    echo "{\"language\": \"SolidityAST\", \"settings\": {\"outputSelection\": {\"*\": {\"\": [\"ast\"]}}}}" > standard_json.json
    jq --from-file _ast_json.json standard_json.json > standard_json_input.json

    # (re)import ast via standard json - and export it again as obtained result (silently)
    if ! "${import_via_standard_json_command[@]}" > obtained_standard_json.json 2> stderr_import.txt
    then
        print_stderr_stdout "ERROR: AST reimport failed (import) for input file ${sol_file}." ./stderr_import.txt ./obtained_standard_json.json
        print_used_commands "$(pwd)" "${export_command[*]} > expected.json" "${import_command[*]}"
        return 1
    fi

    jq .sources expected.json > expected_standard_json.json
    jq .sources obtained_standard_json.json > obtained_standard_json_.json
    jq 'walk(if type == "object" and has("ast") then .AST = .ast | del(.ast) else . end)' < obtained_standard_json_.json > obtained_standard_json.json
    jq --sort-keys . < obtained_standard_json.json > obtained_standard_json_.json
    mv obtained_standard_json_.json obtained_standard_json.json

    # compare expected and obtained ASTs
    if ! diff_files expected.json obtained.json || ! diff_files expected_standard_json.json obtained_standard_json.json
    then
        printError "ERROR: AST reimport failed for ${sol_file}"
        if (( EXIT_ON_ERROR == 1 ))
@ -879,9 +879,12 @@ void CommandLineInterface::handleCombinedJSON()
        output[g_strSources] = Json::Value(Json::objectValue);
        for (auto const& sourceCode: m_fileReader.sourceUnits())
        {
            output[g_strSources][sourceCode.first] = Json::Value(Json::objectValue);
            output[g_strSources][sourceCode.first]["AST"] = ASTJsonExporter(
                m_compiler->state(),
                m_compiler->sourceIndices()
            ).toJson(m_compiler->ast(sourceCode.first));
            output[g_strSources][sourceCode.first]["id"] = m_compiler->sourceIndices().at(sourceCode.first);
        }
    }
@ -61,7 +61,8 @@
}
],
"src": "36:38:0"
}
},
"id": 0
},
"combined_json_with_base_path/input.sol":
{

@ -105,7 +106,8 @@
}
],
"src": "36:42:1"
}
},
"id": 1
}
},
"version": "<VERSION REMOVED>"

@ -29,7 +29,8 @@
}
],
"src": "36:22:0"
}
},
"id": 0
}
},
"version": "<VERSION REMOVED>"
test/cmdlineTests/standard_import_ast/input.json (new file, 94 lines)
@ -0,0 +1,94 @@
{
    "language": "SolidityAST",
    "sources": {
        "A": {
            "ast": {
                "absolutePath": "A",
                "exportedSymbols": {
                    "C": [
                        6
                    ]
                },
                "id": 7,
                "license": "GPL-3.0",
                "nodeType": "SourceUnit",
                "nodes": [
                    {
                        "id": 1,
                        "literals": [
                            "solidity",
                            ">=",
                            "0.0"
                        ],
                        "nodeType": "PragmaDirective",
                        "src": "36:22:0"
                    },
                    {
                        "abstract": false,
                        "baseContracts": [],
                        "canonicalName": "C",
                        "contractDependencies": [],
                        "contractKind": "contract",
                        "fullyImplemented": true,
                        "id": 6,
                        "linearizedBaseContracts": [
                            6
                        ],
                        "name": "C",
                        "nameLocation": "68:1:0",
                        "nodeType": "ContractDefinition",
                        "nodes": [
                            {
                                "body": {
                                    "id": 4,
                                    "nodeType": "Block",
                                    "src": "97:2:0",
                                    "statements": []
                                },
                                "functionSelector": "26121ff0",
                                "id": 5,
                                "implemented": true,
                                "kind": "function",
                                "modifiers": [],
                                "name": "f",
                                "nameLocation": "81:1:0",
                                "nodeType": "FunctionDefinition",
                                "parameters": {
                                    "id": 2,
                                    "nodeType": "ParameterList",
                                    "parameters": [],
                                    "src": "82:2:0"
                                },
                                "returnParameters": {
                                    "id": 3,
                                    "nodeType": "ParameterList",
                                    "parameters": [],
                                    "src": "97:0:0"
                                },
                                "scope": 6,
                                "src": "72:27:0",
                                "stateMutability": "pure",
                                "virtual": false,
                                "visibility": "public"
                            }
                        ],
                        "scope": 7,
                        "src": "59:42:0",
                        "usedErrors": []
                    }
                ],
                "src": "36:65:0"
            },
            "id": 0
        }
    },
    "settings": {
        "outputSelection": {
            "*": {
                "": [
                    "ast"
                ]
            }
        }
    }
}
test/cmdlineTests/standard_import_ast/output.json (new file, 97 lines)
@ -0,0 +1,97 @@
{
  "sources":
  {
    "A":
    {
      "ast":
      {
        "absolutePath": "A",
        "exportedSymbols":
        {
          "C":
          [
            6
          ]
        },
        "id": 7,
        "license": "GPL-3.0",
        "nodeType": "SourceUnit",
        "nodes":
        [
          {
            "id": 1,
            "literals":
            [
              "solidity",
              ">=",
              "0.0"
            ],
            "nodeType": "PragmaDirective",
            "src": "36:22:0"
          },
          {
            "abstract": false,
            "baseContracts": [],
            "canonicalName": "C",
            "contractDependencies": [],
            "contractKind": "contract",
            "fullyImplemented": true,
            "id": 6,
            "linearizedBaseContracts":
            [
              6
            ],
            "name": "C",
            "nameLocation": "68:1:0",
            "nodeType": "ContractDefinition",
            "nodes":
            [
              {
                "body":
                {
                  "id": 4,
                  "nodeType": "Block",
                  "src": "97:2:0",
                  "statements": []
                },
                "functionSelector": "26121ff0",
                "id": 5,
                "implemented": true,
                "kind": "function",
                "modifiers": [],
                "name": "f",
                "nameLocation": "81:1:0",
                "nodeType": "FunctionDefinition",
                "parameters":
                {
                  "id": 2,
                  "nodeType": "ParameterList",
                  "parameters": [],
                  "src": "82:2:0"
                },
                "returnParameters":
                {
                  "id": 3,
                  "nodeType": "ParameterList",
                  "parameters": [],
                  "src": "97:0:0"
                },
                "scope": 6,
                "src": "72:27:0",
                "stateMutability": "pure",
                "virtual": false,
                "visibility": "public"
              }
            ],
            "scope": 7,
            "src": "59:42:0",
            "usedErrors": [],
            "usedEvents": []
          }
        ],
        "src": "36:65:0"
      },
      "id": 0
    }
  }
}
@ -0,0 +1,50 @@
{
    "language": "SolidityAST",
    "sources": {
        "A": {
            "ast": {
                "absolutePath": "empty_contract.sol",
                "exportedSymbols": {
                    "test": [
                        1
                    ]
                },
                "id": 2,
                "nodeType": "SourceUnit",
                "nodes": [
                    {
                        "abstract": false,
                        "baseContracts": [],
                        "canonicalName": "test",
                        "contractDependencies": [],
                        "contractKind": "contract",
                        "fullyImplemented": true,
                        "id": 1,
                        "linearizedBaseContracts": [
                            1
                        ],
                        "name": "test",
                        "nameLocation": "9:4:0",
                        "nodeType": "ContractDefinition",
                        "nodes": [],
                        "scope": 2,
                        "src": "0:17:0",
                        "usedErrors": []
                    }
                ],
                "src": "0:124:0"
            },
            "id": 0
        }
    },
    "settings": {
        "outputSelection": {
            "*": {
                "*": [
                    "evm.bytecode",
                    "evm.bytecode.sourceMap"
                ]
            }
        }
    }
}
@ -0,0 +1,30 @@
{
  "contracts":
  {
    "A":
    {
      "test":
      {
        "evm":
        {
          "bytecode":
          {
            "functionDebugData": {},
            "generatedSources": [],
            "linkReferences": {},
            "object": "<BYTECODE REMOVED>",
            "opcodes": "<OPCODES REMOVED>",
            "sourceMap": "<SOURCEMAP REMOVED>"
          }
        }
      }
    }
  },
  "sources":
  {
    "A":
    {
      "id": 0
    }
  }
}
@ -187,7 +187,7 @@ BOOST_AUTO_TEST_CASE(invalid_language)
    }
    )";
    Json::Value result = compile(input);
    BOOST_CHECK(containsError(result, "JSONError", "Only \"Solidity\", \"Yul\" or \"SolidityAST\" is supported as a language."));
}

BOOST_AUTO_TEST_CASE(valid_language)