/*
This file is part of solidity.
solidity is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
solidity is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with solidity. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @author Christian
* @author Gav Wood
* @date 2014
* Full-stack compiler that converts a source code string to bytecode.
*/
#include <libsolidity/interface/CompilerStack.h>
#include <libsolidity/interface/Version.h>
#include <libsolidity/interface/InterfaceHandler.h>
#include <libsolidity/ast/AST.h>
#include <libsolidity/parsing/Scanner.h>
#include <libsolidity/parsing/Parser.h>
#include <libsolidity/analysis/GlobalContext.h>
#include <libsolidity/analysis/NameAndTypeResolver.h>
#include <libsolidity/analysis/TypeChecker.h>
#include <libsolidity/analysis/DocStringAnalyser.h>
#include <libsolidity/analysis/SyntaxChecker.h>
#include <libsolidity/analysis/SemVerHandler.h>
#include <libsolidity/codegen/Compiler.h>
#include <libsolidity/formal/Why3Translator.h>
#include <libevmasm/Exceptions.h>
#include <libdevcore/SHA3.h>
#include <libdevcore/SwarmHash.h>
#include <json/json.h>
using namespace std;
using namespace dev;
using namespace dev::solidity;
CompilerStack::CompilerStack(ReadFileCallback const& _readFile):
m_readFile(_readFile), m_parseSuccessful(false) {}
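// Sets the import remappings. Each entry has the form [context:]prefix=target;
// entries that do not contain an '=' are silently ignored.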
void CompilerStack::setRemappings(vector<string> const& _remappings)
{
vector<Remapping> remappings;
for (auto const& remapping: _remappings)
{
auto eq = find(remapping.begin(), remapping.end(), '=');
if (eq == remapping.end())
continue; // ignore
auto colon = find(remapping.begin(), eq, ':');
Remapping r;
r.context = colon == eq ? string() : string(remapping.begin(), colon);
r.prefix = colon == eq ? string(remapping.begin(), eq) : string(colon + 1, eq);
r.target = string(eq + 1, remapping.end());
remappings.push_back(r);
}
swap(m_remappings, remappings);
}
void CompilerStack::reset(bool _keepSources)
{
m_parseSuccessful = false;
if (_keepSources)
for (auto& sourcePair: m_sources)
sourcePair.second.reset();
else
{
m_sources.clear();
}
m_optimize = false;
m_optimizeRuns = 200;
m_globalContext.reset();
m_sourceOrder.clear();
m_contracts.clear();
m_errors.clear();
}
bool CompilerStack::addSource(string const& _name, string const& _content, bool _isLibrary)
{
bool existed = m_sources.count(_name) != 0;
reset(true);
m_sources[_name].scanner = make_shared<Scanner>(CharStream(_content), _name);
m_sources[_name].isLibrary = _isLibrary;
return existed;
}
void CompilerStack::setSource(string const& _sourceCode)
{
reset();
addSource("", _sourceCode);
}
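// Parses all registered sources, loading missing imports through the read-file
// callback, then runs syntax checks, docstring analysis, name and type resolution
// and type checking. Returns true iff there were no errors (warnings alone do not fail).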
bool CompilerStack::parse()
{
//reset
m_errors.clear();
m_parseSuccessful = false;
if (SemVerVersion{string(VersionString)}.isPrerelease())
{
auto err = make_shared<Error>(Error::Type::Warning);
*err << errinfo_comment("This is a pre-release compiler version, please do not use it in production.");
m_errors.push_back(err);
}
vector<string> sourcesToParse;
for (auto const& s: m_sources)
sourcesToParse.push_back(s.first);
map<string, SourceUnit const*> sourceUnitsByName;
for (size_t i = 0; i < sourcesToParse.size(); ++i)
{
string const& path = sourcesToParse[i];
Source& source = m_sources[path];
source.scanner->reset();
source.ast = Parser(m_errors).parse(source.scanner);
sourceUnitsByName[path] = source.ast.get();
if (!source.ast)
solAssert(!Error::containsOnlyWarnings(m_errors), "Parser returned null but did not report error.");
else
{
source.ast->annotation().path = path;
for (auto const& newSource: loadMissingSources(*source.ast, path))
{
string const& newPath = newSource.first;
string const& newContents = newSource.second;
m_sources[newPath].scanner = make_shared<Scanner>(CharStream(newContents), newPath);
sourcesToParse.push_back(newPath);
}
}
}
if (!Error::containsOnlyWarnings(m_errors))
// errors while parsing; should stop before type checking
return false;
resolveImports();
bool noErrors = true;
SyntaxChecker syntaxChecker(m_errors);
for (Source const* source: m_sourceOrder)
if (!syntaxChecker.checkSyntax(*source->ast))
noErrors = false;
DocStringAnalyser docStringAnalyser(m_errors);
for (Source const* source: m_sourceOrder)
if (!docStringAnalyser.analyseDocStrings(*source->ast))
noErrors = false;
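// Register all top-level declarations and resolve imports before resolving
// names and types within each contract.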
m_globalContext = make_shared<GlobalContext>();
NameAndTypeResolver resolver(m_globalContext->declarations(), m_errors);
for (Source const* source: m_sourceOrder)
if (!resolver.registerDeclarations(*source->ast))
return false;
for (Source const* source: m_sourceOrder)
if (!resolver.performImports(*source->ast, sourceUnitsByName))
return false;
for (Source const* source: m_sourceOrder)
for (ASTPointer<ASTNode> const& node: source->ast->nodes())
if (ContractDefinition* contract = dynamic_cast<ContractDefinition*>(node.get()))
{
m_globalContext->setCurrentContract(*contract);
if (!resolver.updateDeclaration(*m_globalContext->currentThis())) return false;
if (!resolver.updateDeclaration(*m_globalContext->currentSuper())) return false;
if (!resolver.resolveNamesAndTypes(*contract)) return false;
m_contracts[contract->name()].contract = contract;
}
if (!checkLibraryNameClashes())
noErrors = false;
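// Type-check each contract; Natspec dev/user documentation is only attached to
// contracts that type-check successfully.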
for (Source const* source: m_sourceOrder)
for (ASTPointer<ASTNode> const& node: source->ast->nodes())
if (ContractDefinition* contract = dynamic_cast<ContractDefinition*>(node.get()))
{
m_globalContext->setCurrentContract(*contract);
resolver.updateDeclaration(*m_globalContext->currentThis());
TypeChecker typeChecker(m_errors);
if (typeChecker.checkTypeRequirements(*contract))
{
contract->setDevDocumentation(InterfaceHandler::devDocumentation(*contract));
contract->setUserDocumentation(InterfaceHandler::userDocumentation(*contract));
}
else
noErrors = false;
m_contracts[contract->name()].contract = contract;
}
m_parseSuccessful = noErrors;
return m_parseSuccessful;
}
bool CompilerStack::parse(string const& _sourceCode)
{
setSource(_sourceCode);
return parse();
}
vector<string> CompilerStack::contractNames() const
{
if (!m_parseSuccessful)
BOOST_THROW_EXCEPTION(CompilerError() << errinfo_comment("Parsing was not successful."));
vector<string> contractNames;
for (auto const& contract: m_contracts)
contractNames.push_back(contract.first);
return contractNames;
}
bool CompilerStack::compile(bool _optimize, unsigned _runs, map<string, h160> const& _libraries)
{
if (!m_parseSuccessful)
if (!parse())
return false;
m_optimize = _optimize;
m_optimizeRuns = _runs;
m_libraries = _libraries;
map<ContractDefinition const*, eth::Assembly const*> compiledContracts;
for (Source const* source: m_sourceOrder)
for (ASTPointer<ASTNode> const& node: source->ast->nodes())
if (auto contract = dynamic_cast<ContractDefinition const*>(node.get()))
compileContract(*contract, compiledContracts);
this->link();
return true;
}
bool CompilerStack::compile(string const& _sourceCode, bool _optimize, unsigned _runs)
{
return parse(_sourceCode) && compile(_optimize, _runs);
}
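// Replaces the library address placeholders in the creation, runtime and clone
// objects of every contract with the addresses supplied to compile().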
void CompilerStack::link()
{
for (auto& contract: m_contracts)
{
contract.second.object.link(m_libraries);
contract.second.runtimeObject.link(m_libraries);
contract.second.cloneObject.link(m_libraries);
}
}
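// Translates all sources to Why3 for formal analysis and stores the result in
// m_formalTranslation; errors go to _errors if provided, otherwise to m_errors.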
bool CompilerStack::prepareFormalAnalysis(ErrorList* _errors)
{
if (!_errors)
_errors = &m_errors;
Why3Translator translator(*_errors);
for (Source const* source: m_sourceOrder)
if (!translator.process(*source->ast))
return false;
m_formalTranslation = translator.translation();
return true;
}
eth::AssemblyItems const* CompilerStack::assemblyItems(string const& _contractName) const
{
Contract const& currentContract = contract(_contractName);
return currentContract.compiler ? &contract(_contractName).compiler->assemblyItems() : nullptr;
}
eth::AssemblyItems const* CompilerStack::runtimeAssemblyItems(string const& _contractName) const
{
Contract const& currentContract = contract(_contractName);
return currentContract.compiler ? &contract(_contractName).compiler->runtimeAssemblyItems() : nullptr;
}
string const* CompilerStack::sourceMapping(string const& _contractName) const
{
Contract const& c = contract(_contractName);
if (!c.sourceMapping)
{
if (auto items = assemblyItems(_contractName))
c.sourceMapping.reset(new string(computeSourceMapping(*items)));
}
return c.sourceMapping.get();
}
string const* CompilerStack::runtimeSourceMapping(string const& _contractName) const
{
Contract const& c = contract(_contractName);
if (!c.runtimeSourceMapping)
{
if (auto items = runtimeAssemblyItems(_contractName))
c.runtimeSourceMapping.reset(new string(computeSourceMapping(*items)));
}
return c.runtimeSourceMapping.get();
}
eth::LinkerObject const& CompilerStack::object(string const& _contractName) const
{
return contract(_contractName).object;
}
eth::LinkerObject const& CompilerStack::runtimeObject(string const& _contractName) const
{
return contract(_contractName).runtimeObject;
}
eth::LinkerObject const& CompilerStack::cloneObject(string const& _contractName) const
{
return contract(_contractName).cloneObject;
}
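// Returns the keccak256 hash of the contract's runtime bytecode, or an empty hash
// if there is no runtime code yet or it still contains unresolved link references.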
dev::h256 CompilerStack::contractCodeHash(string const& _contractName) const
{
auto const& obj = runtimeObject(_contractName);
if (obj.bytecode.empty() || !obj.linkReferences.empty())
return dev::h256();
else
return dev::keccak256(obj.bytecode);
}
Json::Value CompilerStack::streamAssembly(ostream& _outStream, string const& _contractName, StringMap _sourceCodes, bool _inJsonFormat) const
{
Contract const& currentContract = contract(_contractName);
if (currentContract.compiler)
return currentContract.compiler->streamAssembly(_outStream, _sourceCodes, _inJsonFormat);
else
{
_outStream << "Contract not fully implemented" << endl;
return Json::Value();
}
}
vector<string> CompilerStack::sourceNames() const
{
vector<string> names;
for (auto const& s: m_sources)
names.push_back(s.first);
return names;
}
map<string, unsigned> CompilerStack::sourceIndices() const
{
map<string, unsigned> indices;
for (auto const& s: m_sources)
indices[s.first] = indices.size();
return indices;
}
Json::Value const& CompilerStack::interface(string const& _contractName) const
{
return metadata(_contractName, DocumentationType::ABIInterface);
}
Json::Value const& CompilerStack::metadata(string const& _contractName, DocumentationType _type) const
{
if (!m_parseSuccessful)
BOOST_THROW_EXCEPTION(CompilerError() << errinfo_comment("Parsing was not successful."));
return metadata(contract(_contractName), _type);
}
Json::Value const& CompilerStack::metadata(Contract const& _contract, DocumentationType _type) const
{
if (!m_parseSuccessful)
BOOST_THROW_EXCEPTION(CompilerError() << errinfo_comment("Parsing was not successful."));
solAssert(_contract.contract, "");
std::unique_ptr<Json::Value const>* doc;
// checks whether we already have the documentation
switch (_type)
{
case DocumentationType::NatspecUser:
doc = &_contract.userDocumentation;
break;
case DocumentationType::NatspecDev:
doc = &_contract.devDocumentation;
break;
case DocumentationType::ABIInterface:
doc = &_contract.interface;
break;
default:
BOOST_THROW_EXCEPTION(InternalCompilerError() << errinfo_comment("Illegal documentation type."));
}
// caches the result
if (!*doc)
doc->reset(new Json::Value(InterfaceHandler::documentation(*_contract.contract, _type)));
return *(*doc);
}
string const& CompilerStack::onChainMetadata(string const& _contractName) const
{
if (!m_parseSuccessful)
BOOST_THROW_EXCEPTION(CompilerError() << errinfo_comment("Parsing was not successful."));
return contract(_contractName).onChainMetadata;
}
Scanner const& CompilerStack::scanner(string const& _sourceName) const
{
return *source(_sourceName).scanner;
}
SourceUnit const& CompilerStack::ast(string const& _sourceName) const
{
return *source(_sourceName).ast;
}
ContractDefinition const& CompilerStack::contractDefinition(string const& _contractName) const
{
return *contract(_contractName).contract;
}
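// Returns the index of _function's entry tag within the runtime assembly items,
// or 0 if the contract has not been compiled or the tag cannot be found.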
size_t CompilerStack::functionEntryPoint(
std::string const& _contractName,
FunctionDefinition const& _function
) const
{
shared_ptr<Compiler> const& compiler = contract(_contractName).compiler;
if (!compiler)
return 0;
eth::AssemblyItem tag = compiler->functionEntryLabel(_function);
if (tag.type() == eth::UndefinedItem)
return 0;
eth::AssemblyItems const& items = compiler->runtimeAssemblyItems();
for (size_t i = 0; i < items.size(); ++i)
if (items.at(i).type() == eth::Tag && items.at(i).data() == tag.data())
return i;
return 0;
}
tuple<int, int, int, int> CompilerStack::positionFromSourceLocation(SourceLocation const& _sourceLocation) const
{
int startLine;
int startColumn;
int endLine;
int endColumn;
tie(startLine, startColumn) = scanner(*_sourceLocation.sourceName).translatePositionToLineColumn(_sourceLocation.start);
tie(endLine, endColumn) = scanner(*_sourceLocation.sourceName).translatePositionToLineColumn(_sourceLocation.end);
return make_tuple(++startLine, ++startColumn, ++endLine, ++endColumn);
}
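// Collects the contents of all imported sources that are not yet known, resolving
// each import path relative to _sourcePath and applying remappings; imports that
// cannot be read are reported as parser errors.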
StringMap CompilerStack::loadMissingSources(SourceUnit const& _ast, std::string const& _sourcePath)
{
StringMap newSources;
for (auto const& node: _ast.nodes())
if (ImportDirective const* import = dynamic_cast<ImportDirective const*>(node.get()))
{
string importPath = absolutePath(import->path(), _sourcePath);
// The current value of `path` is the absolute path as seen from this source file.
// We first have to apply remappings before we can store the actual absolute path
// as seen globally.
importPath = applyRemapping(importPath, _sourcePath);
import->annotation().absolutePath = importPath;
if (m_sources.count(importPath) || newSources.count(importPath))
continue;
ReadFileResult result{false, string("File not supplied initially.")};
if (m_readFile)
result = m_readFile(importPath);
if (result.success)
newSources[importPath] = result.contentsOrErrorMesage;
else
{
auto err = make_shared(Error::Type::ParserError);
*err <<
errinfo_sourceLocation(import->location()) <<
errinfo_comment("Source \"" + importPath + "\" not found: " + result.contentsOrErrorMesage);
m_errors.push_back(std::move(err));
continue;
}
}
return newSources;
}
string CompilerStack::applyRemapping(string const& _path, string const& _context)
{
// Try to find the longest prefix match in all remappings that are active in the current context.
auto isPrefixOf = [](string const& _a, string const& _b)
{
if (_a.length() > _b.length())
return false;
return std::equal(_a.begin(), _a.end(), _b.begin());
};
size_t longestPrefix = 0;
string longestPrefixTarget;
for (auto const& redir: m_remappings)
{
// Skip if we already have a closer match.
if (longestPrefix > 0 && redir.prefix.length() <= longestPrefix)
continue;
// Skip if redir.context is not a prefix of _context
if (!isPrefixOf(redir.context, _context))
continue;
// Skip if the prefix does not match.
if (!isPrefixOf(redir.prefix, _path))
continue;
longestPrefix = redir.prefix.length();
longestPrefixTarget = redir.target;
}
string path = longestPrefixTarget;
path.append(_path.begin() + longestPrefix, _path.end());
return path;
}
void CompilerStack::resolveImports()
{
// topological sorting (depth first search) of the import graph, cutting potential cycles
vector