Easy review fixes.

Daniel Kirchner 2020-09-17 17:25:37 +02:00
parent 0eb32b3f60
commit 976abf4715
5 changed files with 79 additions and 51 deletions

View File

@@ -958,7 +958,7 @@ memoryguard
 This function is available in the EVM dialect with objects. The caller of
 ``let ptr := memoryguard(size)`` (where ``size`` has to be a literal number)
 promises that they only use memory in either the range ``[0, size)`` or the
-unbounded range above ``ptr``.
+unbounded range starting at ``ptr``.
 Since the presence of a ``memoryguard`` call indicates that all memory access
 adheres to this restriction, it allows the optimizer to perform additional
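To make the promise concrete, here is a purely illustrative sketch of the arithmetic behind this contract (the helper names are made up for this example and are not part of the compiler): moved variables are placed directly above the statically reserved area, and the memoryguard argument is bumped accordingly, so every access stays inside ``[0, size)`` or at/above the returned ``ptr``.

```cpp
// Illustrative only: the arithmetic implied by the memoryguard contract,
// assuming 32-byte slots; names are not part of the Solidity code base.
#include <cassert>
#include <cstdint>
#include <iostream>

// Memory offset of the i-th variable moved to memory, given the original
// literal passed to memoryguard (the statically reserved area [0, size)).
uint64_t slotOffset(uint64_t reservedSize, uint64_t slot)
{
    return reservedSize + 32 * slot;
}

// New value for the memoryguard literal after numSlots slots have been reserved.
uint64_t patchedGuard(uint64_t reservedSize, uint64_t numSlots)
{
    assert(numSlots < UINT64_MAX / 32); // mirrors the overflow check in StackLimitEvader below
    return reservedSize + 32 * numSlots;
}

int main()
{
    uint64_t size = 0x80; // original ``memoryguard(0x80)``
    // Two moved variables end up at 0x80 and 0xa0; the patched guard is 0xc0,
    // so everything the caller uses still lies in [0, 0x80) or at/above ptr.
    std::cout << std::hex << slotOffset(size, 0) << ' ' << slotOffset(size, 1)
              << ' ' << patchedGuard(size, 2) << '\n';
}
```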

View File

@@ -142,7 +142,10 @@ string IRGenerator::generate(
     InternalDispatchMap internalDispatchMap = generateInternalDispatchFunctions();
     t("functions", m_context.functionCollector().requestedFunctions());
     t("subObjects", subObjectSources(m_context.subObjectsCreated()));
-    t("memoryInitCreation", memoryInit(!m_context.inlineAssemblySeen()));
+    // This has to be called only after all other code generation for the creation object is complete.
+    bool creationInvolvesAssembly = m_context.inlineAssemblySeen();
+    t("memoryInitCreation", memoryInit(!creationInvolvesAssembly));
     resetContext(_contract);
@@ -158,7 +161,10 @@ string IRGenerator::generate(
     generateInternalDispatchFunctions();
     t("runtimeFunctions", m_context.functionCollector().requestedFunctions());
     t("runtimeSubObjects", subObjectSources(m_context.subObjectsCreated()));
-    t("memoryInitRuntime", memoryInit(!m_context.inlineAssemblySeen()));
+    // This has to be called only after all other code generation for the runtime object is complete.
+    bool runtimeInvolvesAssembly = m_context.inlineAssemblySeen();
+    t("memoryInitRuntime", memoryInit(!runtimeInvolvesAssembly));
     return t.render();
 }
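The body of ``memoryInit`` is not part of this diff; the hypothetical sketch below only illustrates why the flag has to be sampled after all code generation for the object is complete: whether a memoryguard-wrapped initialisation can safely be emitted depends on whether any inline assembly was seen anywhere in the generated code.

```cpp
// Hypothetical sketch (not the actual IRGenerator::memoryInit): shows the two
// shapes of free-memory-pointer initialisation the flag chooses between.
#include <iostream>
#include <string>

std::string memoryInitSketch(bool useMemoryGuard)
{
    // If no inline assembly was seen, the initialisation can be wrapped in
    // memoryguard, enabling the stack-to-memory optimisation; otherwise a plain
    // mstore has to be emitted, because assembly may use memory arbitrarily.
    if (useMemoryGuard)
        return "mstore(64, memoryguard(128))";
    return "mstore(64, 128)";
}

int main()
{
    bool inlineAssemblySeen = false; // would be m_context.inlineAssemblySeen() after generation
    std::cout << memoryInitSketch(!inlineAssemblySeen) << '\n';
}
```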

View File

@@ -35,31 +35,33 @@ using namespace solidity::yul;
 namespace
 {
-// Walks the call graph using a Depth-First-Search assigning memory offsets to variables.
-// - The leaves of the call graph will get the lowest offsets, increasing towards the root.
-// - ``nextAvailableSlot`` maps a function to the next available slot that can be used by another
-//   function that calls it.
-// - For each function starting from the root of the call graph:
-//   - Visit all children that are not already visited.
-//   - Determine the maximum value ``n`` of the values of ``nextAvailableSlot`` among the children.
-//   - If the function itself contains variables that need memory slots, but is contained in a cycle,
-//     abort the process as failure.
-//   - If not, assign each variable its slot starting from ``n`` (incrementing it).
-//   - Assign ``n`` to ``nextAvailableSlot`` of the function.
+/**
+ * Walks the call graph using a Depth-First-Search assigning memory slots to variables.
+ * - The leaves of the call graph will get the lowest slot, increasing towards the root.
+ * - ``slotsRequiredForFunction`` maps a function to the number of slots it requires (which is also the
+ *   next available slot that can be used by another function that calls this function).
+ * - For each function starting from the root of the call graph:
+ *   - Visit all children that are not already visited.
+ *   - Determine the maximum value ``n`` of the values of ``slotsRequiredForFunction`` among the children.
+ *   - If the function itself contains variables that need memory slots, but is contained in a cycle,
+ *     abort the process as failure.
+ *   - If not, assign each variable its slot starting from ``n`` (incrementing it).
+ *   - Assign ``n`` to ``slotsRequiredForFunction`` of the function.
+ */
 struct MemoryOffsetAllocator
 {
     uint64_t run(YulString _function = YulString{})
     {
-        if (nextAvailableSlot.count(_function))
-            return nextAvailableSlot[_function];
+        if (slotsRequiredForFunction.count(_function))
+            return slotsRequiredForFunction[_function];
         // Assign to zero early to guard against recursive calls.
-        nextAvailableSlot[_function] = 0;
-        uint64_t nextSlot = 0;
+        slotsRequiredForFunction[_function] = 0;
+        uint64_t requiredSlots = 0;
         if (callGraph.count(_function))
             for (YulString child: callGraph.at(_function))
-                nextSlot = std::max(run(child), nextSlot);
+                requiredSlots = std::max(run(child), requiredSlots);
         if (unreachableVariables.count(_function))
         {
@@ -71,17 +73,17 @@ struct MemoryOffsetAllocator
                 // TODO: Too many function arguments or return parameters.
             }
             else
-                assignedSlots[variable] = nextSlot++;
+                assignedSlots[variable] = requiredSlots++;
         }
-        return nextAvailableSlot[_function] = nextSlot;
+        return slotsRequiredForFunction[_function] = requiredSlots;
     }
     map<YulString, set<YulString>> const& unreachableVariables;
     map<YulString, set<YulString>> const& callGraph;
     map<YulString, map<YulString, uint64_t>> slotAllocations{};
-    map<YulString, uint64_t> nextAvailableSlot{};
+    map<YulString, uint64_t> slotsRequiredForFunction{};
 };
 u256 literalArgumentValue(FunctionCall const& _call)
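A self-contained sketch of the allocation scheme described in the comment above, using plain ``std::string`` in place of ``YulString`` and omitting the cycle and parameter checks; it is illustrative only, not the compiler's ``MemoryOffsetAllocator``.

```cpp
// Simplified DFS slot allocation over a toy call graph: callees get the lowest
// slots, callers place their own variables above everything their callees need.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <string>

struct SketchAllocator
{
    uint64_t run(std::string const& _function = "")
    {
        if (slotsRequiredForFunction.count(_function))
            return slotsRequiredForFunction[_function];
        slotsRequiredForFunction[_function] = 0; // guard against recursive calls
        uint64_t requiredSlots = 0;
        if (callGraph.count(_function))
            for (std::string const& child: callGraph.at(_function))
                requiredSlots = std::max(run(child), requiredSlots);
        // Variables of this function are placed above everything its callees need.
        if (unreachableVariables.count(_function))
            for (std::string const& variable: unreachableVariables.at(_function))
                slotAllocations[_function][variable] = requiredSlots++;
        return slotsRequiredForFunction[_function] = requiredSlots;
    }

    std::map<std::string, std::set<std::string>> unreachableVariables;
    std::map<std::string, std::set<std::string>> callGraph;
    std::map<std::string, std::map<std::string, uint64_t>> slotAllocations;
    std::map<std::string, uint64_t> slotsRequiredForFunction;
};

int main()
{
    // The top-level code ("") calls f; f needs slots for x and y, "" needs one for z.
    SketchAllocator allocator;
    allocator.callGraph[""] = {"f"};
    allocator.unreachableVariables["f"] = {"x", "y"};
    allocator.unreachableVariables[""] = {"z"};
    uint64_t total = allocator.run();
    // f's variables get slots 0 and 1 (leaf), z gets slot 2, total is 3.
    std::cout << total << ' ' << allocator.slotAllocations[""]["z"] << '\n';
}
```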
@@ -116,8 +118,8 @@ void StackLimitEvader::run(
     // Make sure all calls to ``memoryguard`` we found have the same value as argument (otherwise, abort).
     u256 reservedMemory = literalArgumentValue(*memoryGuardCalls.front());
-    for (FunctionCall const* getFreeMemoryStartCall: memoryGuardCalls)
-        if (reservedMemory != literalArgumentValue(*getFreeMemoryStartCall))
+    for (FunctionCall const* memoryGuardCall: memoryGuardCalls)
+        if (reservedMemory != literalArgumentValue(*memoryGuardCall))
             return;
     CallGraph callGraph = CallGraphGenerator::callGraph(*_object.code);
@@ -130,13 +132,14 @@ void StackLimitEvader::run(
     MemoryOffsetAllocator memoryOffsetAllocator{_unreachableVariables, callGraph.functionCalls};
     uint64_t requiredSlots = memoryOffsetAllocator.run();
-    StackToMemoryMover{_context, reservedMemory, memoryOffsetAllocator.slotAllocations}(*_object.code);
+    StackToMemoryMover::run(_context, reservedMemory, memoryOffsetAllocator.slotAllocations, *_object.code);
+    yulAssert(requiredSlots < std::numeric_limits<uint64_t>::max() / 32, "");
     reservedMemory += 32 * requiredSlots;
-    YulString reservedMemoryString{util::toCompactHexWithPrefix(reservedMemory)};
-    for (FunctionCall* memoryGuardCall: memoryGuardCalls)
+    for (FunctionCall* memoryGuardCall: FunctionCallFinder::run(*_object.code, "memoryguard"_yulstring))
     {
         Literal* literal = std::get_if<Literal>(&memoryGuardCall->arguments.front());
         yulAssert(literal && literal->kind == LiteralKind::Number, "");
-        literal->value = reservedMemoryString;
+        literal->value = YulString{util::toCompactHexWithPrefix(reservedMemory)};
     }
 }
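The newly added assertion is a standard pre-multiplication overflow guard: checking ``requiredSlots < max / 32`` before computing ``32 * requiredSlots`` ensures the 64-bit multiplication cannot wrap. A minimal demonstration, illustrative only:

```cpp
// Checking n < max / 32 before computing 32 * n rules out wrap-around in
// 64-bit arithmetic, so the reserved-memory bump below the guard is safe.
#include <cassert>
#include <cstdint>
#include <limits>

uint64_t reserveBytes(uint64_t requiredSlots)
{
    assert(requiredSlots < std::numeric_limits<uint64_t>::max() / 32);
    return 32 * requiredSlots; // cannot overflow after the check above
}

int main()
{
    return reserveBytes(1000) == 32000 ? 0 : 1;
}
```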

View File

@@ -29,14 +29,14 @@ using namespace solidity::yul;
 namespace
 {
-void appendMemoryStore(
-    vector<Statement>& _statements,
+vector<Statement> generateMemoryStore(
     langutil::SourceLocation const& _loc,
     YulString _mpos,
     Expression _value
 )
 {
-    _statements.emplace_back(ExpressionStatement{_loc, FunctionCall{
+    vector<Statement> result;
+    result.emplace_back(ExpressionStatement{_loc, FunctionCall{
         _loc,
         Identifier{_loc, "mstore"_yulstring},
         {
@@ -44,9 +44,21 @@ void appendMemoryStore(
             std::move(_value)
         }
     }});
+    return result;
 }
 }
+void StackToMemoryMover::run(
+    OptimiserStepContext& _context,
+    u256 _reservedMemory,
+    map<YulString, map<YulString, uint64_t>> const& _memorySlots,
+    Block& _block
+)
+{
+    StackToMemoryMover stackToMemoryMover(_context, _reservedMemory, _memorySlots);
+    stackToMemoryMover(_block);
+}
 StackToMemoryMover::StackToMemoryMover(
     OptimiserStepContext& _context,
     u256 _reservedMemory,
@@ -66,22 +78,20 @@ StackToMemoryMover::StackToMemoryMover(
 void StackToMemoryMover::operator()(FunctionDefinition& _functionDefinition)
 {
+    map<YulString, uint64_t> const* saved = m_currentFunctionMemorySlots;
     if (m_memorySlots.count(_functionDefinition.name))
     {
-        map<YulString, uint64_t> const* saved = m_currentFunctionMemorySlots;
         m_currentFunctionMemorySlots = &m_memorySlots.at(_functionDefinition.name);
         for (TypedName const& param: _functionDefinition.parameters + _functionDefinition.returnVariables)
             if (m_currentFunctionMemorySlots->count(param.name))
             {
                 // TODO: we cannot handle function parameters yet.
-                m_currentFunctionMemorySlots = nullptr;
-                break;
+                m_currentFunctionMemorySlots = saved;
+                return;
             }
-        ASTModifier::operator()(_functionDefinition);
-        m_currentFunctionMemorySlots = saved;
     }
-    else
-        m_currentFunctionMemorySlots = nullptr;
+    ASTModifier::operator()(_functionDefinition);
+    m_currentFunctionMemorySlots = saved;
 }
 void StackToMemoryMover::operator()(Block& _block)
@@ -103,16 +113,11 @@ void StackToMemoryMover::operator()(Block& _block)
         std::unique_ptr<Expression> _value
     ) -> std::vector<Statement> {
         if (_variables.size() == 1)
-        {
-            std::vector<Statement> result;
-            appendMemoryStore(
-                result,
+            return generateMemoryStore(
                 _loc,
                 memoryOffset(_variables.front().name),
                 _value ? *std::move(_value) : Literal{_loc, LiteralKind::Number, "0"_yulstring, {}}
             );
-            return result;
-        }
         VariableDeclaration tempDecl{_loc, {}, std::move(_value)};
         vector<Statement> memoryAssignments;
@@ -123,7 +128,7 @@ void StackToMemoryMover::operator()(Block& _block)
             tempDecl.variables.emplace_back(TypedName{var.location, tempVarName, {}});
             if (m_currentFunctionMemorySlots->count(var.name))
-                appendMemoryStore(memoryAssignments, _loc, memoryOffset(var.name), Identifier{_loc, tempVarName});
+                memoryAssignments += generateMemoryStore(_loc, memoryOffset(var.name), Identifier{_loc, tempVarName});
             else if constexpr (std::is_same_v<std::decay_t<decltype(var)>, Identifier>)
                 variableAssignments.emplace_back(Assignment{
                     _loc, { Identifier{var.location, var.name} },
@@ -186,10 +191,10 @@ void StackToMemoryMover::visit(Expression& _expression)
         )
         {
             langutil::SourceLocation loc = identifier->location;
-            _expression = FunctionCall {
+            _expression = FunctionCall{
                 loc,
                 Identifier{loc, "mload"_yulstring}, {
-                    Literal {
+                    Literal{
                         loc,
                         LiteralKind::Number,
                         memoryOffset(identifier->name),
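The refactoring from ``appendMemoryStore`` to ``generateMemoryStore`` switches from an out-parameter to a value-returning helper whose result is concatenated with ``+=``. The sketch below mimics that pattern with heavily simplified types; the vector ``operator+=`` is defined locally here and merely stands in for the concatenation helper the compiler presumably relies on.

```cpp
// Value-returning statement generation plus vector concatenation, as in the
// generateMemoryStore refactoring above. Statement is simplified to a string.
#include <iostream>
#include <iterator>
#include <string>
#include <vector>

using Statement = std::string;

// Concatenation helper; a stand-in assumption, not the compiler's code.
std::vector<Statement>& operator+=(std::vector<Statement>& _lhs, std::vector<Statement>&& _rhs)
{
    _lhs.insert(_lhs.end(), std::make_move_iterator(_rhs.begin()), std::make_move_iterator(_rhs.end()));
    return _lhs;
}

// Value-returning counterpart of the old out-parameter style appendMemoryStore.
std::vector<Statement> generateMemoryStore(std::string const& _offset, std::string const& _value)
{
    return {"mstore(" + _offset + ", " + _value + ")"};
}

int main()
{
    std::vector<Statement> memoryAssignments;
    memoryAssignments += generateMemoryStore("0x80", "tmp_1");
    memoryAssignments += generateMemoryStore("0xa0", "tmp_2");
    for (Statement const& s: memoryAssignments)
        std::cout << s << '\n';
}
```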

View File

@@ -73,23 +73,37 @@ namespace solidity::yul
  * If a visited function has arguments or return parameters that are contained in the map,
  * the entire function is skipped (no local variables in the function will be moved at all).
  *
- * Prerequisite: Disambiguator, ForLoopInitRewriter.
+ * Prerequisite: Disambiguator, ForLoopInitRewriter, FunctionHoister.
  */
 class StackToMemoryMover: ASTModifier
 {
 public:
-    StackToMemoryMover(
+    /**
+     * Runs the stack to memory mover.
+     * @param _reservedMemory Is the amount of previously reserved memory,
+     *        i.e. the lowest memory offset to which variables can be moved.
+     * @param _memorySlots A map from variables to a slot in memory. The offset to which a variables will be moved
+     *        is given by _reservedMemory plus 32 times its entry in @a _memorySlots.
+     */
+    static void run(
         OptimiserStepContext& _context,
         u256 _reservedMemory,
-        std::map<YulString, std::map<YulString, uint64_t>> const& _memoryOffsets
+        std::map<YulString, std::map<YulString, uint64_t>> const& _memorySlots,
+        Block& _block
     );
     using ASTModifier::operator();
     void operator()(FunctionDefinition& _functionDefinition) override;
     void operator()(Block& _block) override;
     void visit(Expression& _expression) override;
 private:
+    StackToMemoryMover(
+        OptimiserStepContext& _context,
+        u256 _reservedMemory,
+        std::map<YulString, std::map<YulString, uint64_t>> const& _memorySlots
+    );
+    /// @returns a YulString containing the memory offset to be assigned to @a _variable as number literal.
     YulString memoryOffset(YulString _variable);
     u256 m_reservedMemory;
     std::map<YulString, std::map<YulString, uint64_t>> const& m_memorySlots;
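The header change makes the constructor private and exposes a static ``run`` entry point, which is also what the call site in ``StackLimitEvader`` now uses. A minimal sketch of this pattern with simplified stand-in types, not the real yul classes:

```cpp
// Static run() entry point with a private constructor: callers can no longer
// instantiate the visitor and apply it directly, as the old call site did.
#include <iostream>
#include <string>

struct Block { std::string name; };

class StackToMemoryMoverSketch
{
public:
    // The only public way to apply the transformation.
    static void run(Block& _block)
    {
        StackToMemoryMoverSketch mover;
        mover(_block);
    }

private:
    StackToMemoryMoverSketch() = default;

    void operator()(Block& _block)
    {
        std::cout << "transforming block " << _block.name << '\n';
    }
};

int main()
{
    Block body{"object_code"};
    // Before: StackToMemoryMover{...}(body);  After:
    StackToMemoryMoverSketch::run(body);
}
```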