Report stack too deep and conditionally aggressively compress stack in StackLayoutGenerator.

Daniel Kirchner 2021-09-03 15:05:14 +02:00
parent 8447b32d52
commit e022ba1bfb
3 changed files with 168 additions and 16 deletions


@@ -342,6 +342,11 @@ private:
/// Transforms @a _currentStack to @a _targetStack, invoking the provided shuffling operations.
/// Modifies @a _currentStack itself after each invocation of the shuffling operations.
/// @a _swap is a function with signature void(unsigned) that is called when the topmost slot is swapped with
/// the slot `depth` slots below the top. In terms of EVM opcodes this is supposed to be a `SWAP<depth>`.
/// @a _pushOrDup is a function with signature void(StackSlot const&) that is called to push or dup the slot given as
/// its argument to the stack top.
/// @a _pop is a function with signature void() that is called when the topmost slot is popped.
template<typename Swap, typename PushOrDup, typename Pop>
void createStackLayout(Stack& _currentStack, Stack const& _targetStack, Swap _swap, PushOrDup _pushOrDup, Pop _pop)
{
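The three doc-commented callbacks above are the entire interface between the shuffling algorithm and actual code generation. As an illustration of that contract (emphatically not the real implementation), the following hypothetical, self-contained toy version models Stack as std::vector<std::string> and prints EVM-style mnemonics; toyCreateStackLayout and ToyStack are stand-in names:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

using ToyStack = std::vector<std::string>;

template<typename Swap, typename PushOrDup, typename Pop>
void toyCreateStackLayout(ToyStack& _current, ToyStack const& _target, Swap _swap, PushOrDup _pushOrDup, Pop _pop)
{
	auto countOf = [](ToyStack const& _stack, std::string const& _slot) {
		return std::count(_stack.begin(), _stack.end(), _slot);
	};
	// Phase 1: swap surplus slots to the top and pop them.
	while (true)
	{
		auto surplus = std::find_if(_current.begin(), _current.end(), [&](std::string const& _slot) {
			return countOf(_current, _slot) > countOf(_target, _slot);
		});
		if (surplus == _current.end())
			break;
		size_t top = _current.size() - 1;
		if (size_t pos = size_t(surplus - _current.begin()); pos != top)
		{
			_swap(unsigned(top - pos));
			std::swap(_current[pos], _current[top]);
		}
		_pop();
		_current.pop_back();
	}
	// Phase 2: push or dup every slot that is still missing.
	while (_current.size() < _target.size())
		for (auto const& slot: _target)
			if (countOf(_current, slot) < countOf(_target, slot))
			{
				_pushOrDup(slot);
				_current.push_back(slot);
				break;
			}
	// Phase 3: selection sort from the bottom using only top swaps,
	// i.e. _swap(d) always exchanges the top with the slot d below it.
	for (size_t i = 0; i < _target.size(); ++i)
	{
		if (_current[i] == _target[i])
			continue;
		size_t top = _current.size() - 1;
		size_t j = i;
		for (size_t k = i + 1; k <= top; ++k)
			if (_current[k] == _target[i])
				j = k;
		if (j != top)
		{
			_swap(unsigned(top - j));
			std::swap(_current[j], _current[top]);
		}
		if (i != top)
		{
			_swap(unsigned(top - i));
			std::swap(_current[i], _current[top]);
		}
	}
}

int main()
{
	ToyStack current{"a", "b"};
	ToyStack const target{"b", "a", "a"};
	toyCreateStackLayout(
		current,
		target,
		[](unsigned _i) { std::cout << "SWAP" << _i << "\n"; },
		[&](std::string const& _slot) { std::cout << "PUSH/DUP " << _slot << "\n"; },
		[]() { std::cout << "POP\n"; }
	);
	// Prints: PUSH/DUP a, SWAP1, SWAP2; current now equals target.
}

Like the real routine, the toy updates _current after every callback, so the callbacks always see a stack that matches the code emitted so far.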


@@ -28,6 +28,7 @@
#include <libsolutil/Visitor.h>
#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/algorithm/find.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/all.hpp>
#include <range/v3/view/concat.hpp>
@@ -38,6 +39,7 @@
#include <range/v3/view/map.hpp>
#include <range/v3/view/reverse.hpp>
#include <range/v3/view/take.hpp>
#include <range/v3/view/take_last.hpp>
#include <range/v3/view/transform.hpp>
using namespace solidity;
@@ -55,12 +57,83 @@ StackLayout StackLayoutGenerator::run(CFG const& _cfg)
return stackLayout;
}
map<YulString, vector<StackLayoutGenerator::StackTooDeep>> StackLayoutGenerator::reportStackTooDeep(CFG const& _cfg)
{
map<YulString, vector<StackLayoutGenerator::StackTooDeep>> stackTooDeepErrors;
stackTooDeepErrors[YulString{}] = reportStackTooDeep(_cfg, YulString{});
for (auto const& function: _cfg.functions)
if (auto errors = reportStackTooDeep(_cfg, function->name); !errors.empty())
stackTooDeepErrors[function->name] = move(errors);
return stackTooDeepErrors;
}
vector<StackLayoutGenerator::StackTooDeep> StackLayoutGenerator::reportStackTooDeep(CFG const& _cfg, YulString _functionName)
{
StackLayout stackLayout;
CFG::FunctionInfo const* functionInfo = nullptr;
if (!_functionName.empty())
{
functionInfo = &ranges::find(
_cfg.functionInfo,
_functionName,
util::mapTuple([](auto&&, auto&& info) { return info.function.name; })
)->second;
yulAssert(functionInfo, "Function not found.");
}
StackLayoutGenerator generator{stackLayout};
CFG::BasicBlock const* entry = functionInfo ? functionInfo->entry : _cfg.entry;
generator.processEntryPoint(*entry);
return generator.reportStackTooDeep(*entry);
}
StackLayoutGenerator::StackLayoutGenerator(StackLayout& _layout): m_layout(_layout)
{
}
namespace
{
/// @returns all stack too deep errors that would occur when shuffling @a _source to @a _target.
vector<StackLayoutGenerator::StackTooDeep> findStackTooDeep(Stack const& _source, Stack const& _target)
{
Stack currentStack = _source;
vector<StackLayoutGenerator::StackTooDeep> stackTooDeepErrors;
auto getVariableChoices = [](auto&& _range) {
vector<YulString> result;
for (auto const& slot: _range)
if (auto const* variableSlot = get_if<VariableSlot>(&slot))
if (!util::contains(result, variableSlot->variable.get().name))
result.push_back(variableSlot->variable.get().name);
return result;
};
::createStackLayout(
currentStack,
_target,
[&](unsigned _i)
{
if (_i > 16)
stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
_i - 16,
getVariableChoices(currentStack | ranges::views::take_last(_i + 1))
});
},
[&](StackSlot const& _slot)
{
if (canBeFreelyGenerated(_slot))
return;
if (
auto depth = util::findOffset(currentStack | ranges::views::reverse, _slot);
depth && *depth >= 16
)
stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
*depth - 15,
getVariableChoices(currentStack | ranges::views::take_last(*depth + 1))
});
},
[&]() {}
);
return stackTooDeepErrors;
}
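The two thresholds above mirror the EVM instruction set: SWAP1..SWAP16 reach at most 16 slots below the top, and DUP1..DUP16 duplicate a slot at a 0-based offset of at most 15. The reported deficit is exactly the number of slots that would have to vanish for the access to become encodable. A minimal standalone sketch of that arithmetic (swapDeficit and dupDeficit are hypothetical helper names):

#include <cassert>
#include <cstddef>

// Slots that have to vanish before SWAP<_depth> becomes encodable.
size_t swapDeficit(unsigned _depth) { return _depth > 16 ? _depth - 16 : 0; }
// Slots that have to vanish before DUP(_offset + 1) becomes encodable.
size_t dupDeficit(size_t _offset) { return _offset >= 16 ? _offset - 15 : 0; }

int main()
{
	assert(swapDeficit(16) == 0); // SWAP16 is the deepest valid swap.
	assert(swapDeficit(18) == 2); // Two slots too many for any SWAP.
	assert(dupDeficit(15) == 0);  // DUP16 still reaches this slot.
	assert(dupDeficit(16) == 1);  // One slot beyond DUP16's reach.
}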
/// @returns the ideal stack to have before executing an operation that outputs @a _operationOutput, s.t.
/// shuffling to @a _post is cheap (excluding the input of the operation itself).
@@ -191,13 +264,16 @@ Stack createIdealLayout(Stack const& _operationOutput, Stack const& _post, Calla
}
}
-Stack StackLayoutGenerator::propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation)
+Stack StackLayoutGenerator::propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation, bool _aggressiveStackCompression)
{
+// Enable aggressive stack compression for recursive calls.
+if (auto const* functionCall = get_if<CFG::FunctionCall>(&_operation.operation))
+if (functionCall->recursive)
+_aggressiveStackCompression = true;
// This is a huge tradeoff between code size, gas cost and stack size.
-auto generateSlotOnTheFly = [&](StackSlot const&) {
-//return stack.size() > 12 && canBeFreelyGenerated(_slot);
-// return canBeFreelyGenerated(_slot);
-return false;
+auto generateSlotOnTheFly = [&](StackSlot const& _slot) {
+return _aggressiveStackCompression && canBeFreelyGenerated(_slot);
};
// Determine the ideal permutation of the slots in _exitLayout that are not operation outputs (and not to be
@@ -235,18 +311,21 @@ Stack StackLayoutGenerator::propagateStackThroughOperation(Stack _exitStack, CFG
break;
}
-// TODO: there may be a better criterion than overall stack size.
-if (stack.size() > 12)
// Deduplicate and remove slots that can be freely generated.
stack = compressStack(move(stack));
return stack;
}
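The compression trade-off hinges on canBeFreelyGenerated: a slot whose value can be recomputed at any point, e.g. a literal constant, does not have to survive on the stack, since code generation can re-materialize it with a PUSH on demand, at the price of code size and gas. A sketch of the idea with stand-in types (ToySlot and isFreelyGeneratable are hypothetical; the real predicate covers further slot kinds):

#include <algorithm>
#include <string>
#include <variant>
#include <vector>

struct Literal { unsigned long long value = 0; }; // recreatable via PUSH
struct Variable { std::string name; };            // only lives on the stack
using ToySlot = std::variant<Literal, Variable>;

bool isFreelyGeneratable(ToySlot const& _slot)
{
	// Constants can always be re-pushed; a variable's value cannot be recomputed.
	return std::holds_alternative<Literal>(_slot);
}

// Aggressive compression drops freely generatable slots from the desired
// layout; code generation later recreates them on demand.
std::vector<ToySlot> compressed(std::vector<ToySlot> _stack)
{
	_stack.erase(
		std::remove_if(_stack.begin(), _stack.end(), isFreelyGeneratable),
		_stack.end()
	);
	return _stack;
}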
-Stack StackLayoutGenerator::propagateStackThroughBlock(Stack _exitStack, CFG::BasicBlock const& _block)
+Stack StackLayoutGenerator::propagateStackThroughBlock(Stack _exitStack, CFG::BasicBlock const& _block, bool _aggressiveStackCompression)
{
-Stack stack = std::move(_exitStack);
-for (auto& operation: _block.operations | ranges::views::reverse)
-stack = propagateStackThroughOperation(stack, operation);
+Stack stack = _exitStack;
+for (auto&& [idx, operation]: _block.operations | ranges::views::enumerate | ranges::views::reverse)
+{
+Stack newStack = propagateStackThroughOperation(stack, operation, _aggressiveStackCompression);
+if (!_aggressiveStackCompression && !findStackTooDeep(newStack, stack).empty())
+// If we had stack errors, run again with aggressive stack compression.
+return propagateStackThroughBlock(move(_exitStack), _block, true);
+stack = move(newStack);
+}
return stack;
}
@@ -507,7 +586,7 @@ Stack StackLayoutGenerator::combineStack(Stack const& _stack1, Stack const& _sta
if (depth && *depth >= 16)
numOps += 1000;
};
-createStackLayout(testStack, stack1Tail, swap, dupOrPush, [&](){} );
+createStackLayout(testStack, stack1Tail, swap, dupOrPush, [&](){});
testStack = _candidate;
createStackLayout(testStack, stack2Tail, swap, dupOrPush, [&](){});
return numOps;
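Note that swap and dupOrPush above emit nothing: combineStack prices a candidate layout by dry-running the shuffle with callbacks that merely count operations, and the 1000-operation penalty makes any candidate needing an out-of-range SWAP or DUP lose against every reachable alternative. The same counting-callback pattern, expressed against the toy shuffler sketched earlier (toyCreateStackLayout and ToyStack are the hypothetical stand-ins introduced there, reusing that sketch's includes):

size_t shuffleCost(ToyStack _from, ToyStack const& _to)
{
	size_t numOps = 0;
	toyCreateStackLayout(
		_from,
		_to,
		[&](unsigned _i) { ++numOps; if (_i > 16) numOps += 1000; }, // swap
		[&](std::string const& _slot) {                              // push or dup
			// Penalize dups that would have to reach deeper than DUP16.
			auto it = std::find(_from.rbegin(), _from.rend(), _slot);
			size_t offset = it == _from.rend() ? 0 : size_t(it - _from.rbegin());
			numOps += offset >= 16 ? 1000 : 1;
		},
		[&]() { ++numOps; }                                          // pop
	);
	return numOps;
}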
@@ -549,6 +628,54 @@ Stack StackLayoutGenerator::combineStack(Stack const& _stack1, Stack const& _sta
return commonPrefix + bestCandidate;
}
vector<StackLayoutGenerator::StackTooDeep> StackLayoutGenerator::reportStackTooDeep(CFG::BasicBlock const& _entry) const
{
vector<StackTooDeep> stackTooDeepErrors;
util::BreadthFirstSearch<CFG::BasicBlock const*> breadthFirstSearch{{&_entry}};
breadthFirstSearch.run([&](CFG::BasicBlock const* _block, auto _addChild) {
Stack currentStack = m_layout.blockInfos.at(_block).entryLayout;
for (auto const& operation: _block->operations)
{
Stack& operationEntry = m_layout.operationEntryLayout.at(&operation);
stackTooDeepErrors += findStackTooDeep(currentStack, operationEntry);
currentStack = operationEntry;
for (size_t i = 0; i < operation.input.size(); i++)
currentStack.pop_back();
currentStack += operation.output;
}
// Do not attempt to create the exit layout m_layout.blockInfos.at(_block).exitLayout here,
// since the code generator will directly move to the target entry layout.
std::visit(util::GenericVisitor{
[&](CFG::BasicBlock::MainExit const&) {},
[&](CFG::BasicBlock::Jump const& _jump)
{
Stack const& targetLayout = m_layout.blockInfos.at(_jump.target).entryLayout;
stackTooDeepErrors += findStackTooDeep(currentStack, targetLayout);
if (!_jump.backwards)
_addChild(_jump.target);
},
[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump)
{
for (Stack const& targetLayout: {
m_layout.blockInfos.at(_conditionalJump.zero).entryLayout,
m_layout.blockInfos.at(_conditionalJump.nonZero).entryLayout
})
stackTooDeepErrors += findStackTooDeep(currentStack, targetLayout);
_addChild(_conditionalJump.zero);
_addChild(_conditionalJump.nonZero);
},
[&](CFG::BasicBlock::FunctionReturn const&) {},
[&](CFG::BasicBlock::Terminated const&) {},
}, _block->exit);
});
return stackTooDeepErrors;
}
Stack StackLayoutGenerator::compressStack(Stack _stack)
{
optional<size_t> firstDupOffset;


@@ -47,7 +47,23 @@ struct StackLayout
class StackLayoutGenerator
{
public:
struct StackTooDeep
{
/// Number of slots that need to be saved.
size_t deficit = 0;
/// Set of variables; eliminating any of them would decrease the stack deficit.
std::vector<YulString> variableChoices;
};
static StackLayout run(CFG const& _cfg);
/// @returns a map from function names to the stack too deep errors occurring in that function.
/// Requires @a _cfg to be a control flow graph generated from disambiguated Yul.
/// The empty string is mapped to the stack too deep errors of the main entry point.
static std::map<YulString, std::vector<StackTooDeep>> reportStackTooDeep(CFG const& _cfg);
/// @returns all stack too deep errors in the function named @a _functionName.
/// Requires @a _cfg to be a control flow graph generated from disambiguated Yul.
/// If @a _functionName is empty, the stack too deep errors of the main entry point are reported instead.
static std::vector<StackTooDeep> reportStackTooDeep(CFG const& _cfg, YulString _functionName);
private:
StackLayoutGenerator(StackLayout& _context);
@@ -55,11 +71,11 @@ private:
/// @returns the optimal entry stack layout, s.t. @a _operation can be applied to it and
/// the result can be transformed to @a _exitStack with minimal stack shuffling.
/// Simultaneously stores the entry layout required for executing the operation in m_layout.
-Stack propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation);
+Stack propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation, bool _aggressiveStackCompression = false);
/// @returns the desired stack layout at the entry of @a _block, assuming the layout after
/// executing the block should be @a _exitStack.
-Stack propagateStackThroughBlock(Stack _exitStack, CFG::BasicBlock const& _block);
+Stack propagateStackThroughBlock(Stack _exitStack, CFG::BasicBlock const& _block, bool _aggressiveStackCompression = false);
/// Main algorithm walking the graph from entry to exit and propagating back the stack layouts to the entries.
/// Iteratively reruns itself along backwards jumps until the layout is stabilized.
@@ -86,6 +102,10 @@ private:
/// stack shuffling when starting from the returned layout.
static Stack combineStack(Stack const& _stack1, Stack const& _stack2);
/// Walks through the CFG and reports any stack too deep errors that would occur when generating code for it
/// without countermeasures.
std::vector<StackTooDeep> reportStackTooDeep(CFG::BasicBlock const& _entry) const;
/// @returns a copy of @a _stack stripped of all duplicates and slots that can be freely generated.
/// Attempts to create a layout that requires a minimal amount of operations to reconstruct the original
/// stack @a _stack.
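
For orientation, a hedged sketch of how a client might surface the new diagnostics; it assumes a CFG built from disambiguated Yul plus the usual libyul and <iostream> includes, and printStackTooDeepErrors is a hypothetical name. Only calls declared in the header above are used:

void printStackTooDeepErrors(CFG const& _cfg)
{
	for (auto const& [function, errors]: StackLayoutGenerator::reportStackTooDeep(_cfg))
		for (StackLayoutGenerator::StackTooDeep const& error: errors)
		{
			std::cout
				<< (function.empty() ? "<main entry>" : function.str())
				<< ": stack too deep by " << error.deficit
				<< " slot(s); candidate variables to eliminate:";
			for (YulString const& variable: error.variableChoices)
				std::cout << " " << variable.str();
			std::cout << "\n";
		}
}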