From c23b18926f9bdd55145989399367ee1b729bb371 Mon Sep 17 00:00:00 2001
From: Daniel Kirchner
Date: Wed, 18 Aug 2021 13:06:11 +0200
Subject: [PATCH] Adjusted stack compression for testing.

---
 libyul/backends/evm/StackLayoutGenerator.cpp | 101 ++++++++++---------
 libyul/backends/evm/StackLayoutGenerator.h   |   2 +-
 2 files changed, 54 insertions(+), 49 deletions(-)

diff --git a/libyul/backends/evm/StackLayoutGenerator.cpp b/libyul/backends/evm/StackLayoutGenerator.cpp
index b43c10dea..49b4e8037 100644
--- a/libyul/backends/evm/StackLayoutGenerator.cpp
+++ b/libyul/backends/evm/StackLayoutGenerator.cpp
@@ -100,6 +100,38 @@ StackLayoutGenerator::StackLayoutGenerator(StackLayout& _layout): m_layout(_layo
 
 namespace
 {
+vector<StackLayoutGenerator::StackTooDeep> findStackTooDeep(Stack const& _source, Stack const& _target)
+{
+	Stack currentStack = _source;
+	vector<StackLayoutGenerator::StackTooDeep> stackTooDeepErrors;
+	auto getVariableChoices = [](auto&& range) {
+		set<YulString> result;
+		for (auto const& slot: range)
+			if (auto const* variableSlot = get_if<VariableSlot>(&slot))
+				result.insert(variableSlot->variable.get().name);
+		return result;
+	};
+	::createStackLayout(currentStack, _target, [&](unsigned _i) {
+		if (_i > 16)
+			stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
+				_i - 16,
+				getVariableChoices(currentStack | ranges::views::take_last(_i + 1))
+			});
+	}, [&](StackSlot const& _slot) {
+		if (canBeFreelyGenerated(_slot))
+			return;
+		if (
+			auto depth = util::findOffset(currentStack | ranges::views::reverse, _slot);
+			depth && *depth >= 16
+		)
+			stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
+				*depth - 15,
+				getVariableChoices(currentStack | ranges::views::take_last(*depth + 1))
+			});
+	}, [&]() {});
+	return stackTooDeepErrors;
+
+}
 
 /// @returns the ideal stack to have before executing an operation that outputs @a _operationOutput, s.t.
 /// shuffling to @a _post is cheap.
@@ -229,13 +261,11 @@ Stack createIdealLayout(Stack const& _operationOutput, Stack const& _post, Calla
 	}
 }
 
-Stack StackLayoutGenerator::propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation)
+Stack StackLayoutGenerator::propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation, bool _aggressiveStackCompression)
 {
 	// This is a huge tradeoff between code size, gas cost and stack size.
-	auto generateSlotOnTheFly = [&](StackSlot const&) {
-		//return stack.size() > 12 && canBeFreelyGenerated(_slot);
-		// return canBeFreelyGenerated(_slot);
-		return false;
+	auto generateSlotOnTheFly = [&](StackSlot const& _slot) {
+		return _aggressiveStackCompression && canBeFreelyGenerated(_slot);
 	};
 
 	// Determine the ideal permutation of the slots in _exitLayout that are not operation outputs (and not to be
@@ -273,18 +303,29 @@
 			break;
 		}
 
-	// TODO: there may be a better criterion than overall stack size.
-	if (stack.size() > 12)
-		// Deduplicate and remove slots that can be freely generated.
-		stack = compressStack(move(stack));
 	return stack;
 }
 
 Stack StackLayoutGenerator::propagateStackThroughBlock(Stack _exitStack, CFG::BasicBlock const& _block)
 {
-	Stack stack = std::move(_exitStack);
-	for (auto& operation: _block.operations | ranges::views::reverse)
-		stack = propagateStackThroughOperation(stack, operation);
+	vector<StackLayoutGenerator::StackTooDeep> stackErrors;
+	Stack stack = _exitStack;
+	for (auto&& [idx, operation]: _block.operations | ranges::views::enumerate | ranges::views::reverse)
+	{
+		Stack newStack = propagateStackThroughOperation(stack, operation);
+		stackErrors += findStackTooDeep(newStack, stack);
+		stack = std::move(newStack);
+
+	}
+
+	// If we had stack errors, run again with aggressive stack compression.
+	if (!stackErrors.empty())
+	{
+		stack = _exitStack;
+		for (auto&& [idx, operation]: _block.operations | ranges::views::enumerate | ranges::views::reverse)
+			stack = propagateStackThroughOperation(stack, operation, true);
+	}
+
 	return stack;
 }
 
@@ -590,42 +631,6 @@ Stack StackLayoutGenerator::combineStack(Stack const& _stack1, Stack const& _sta
 	return commonPrefix + bestCandidate;
 }
 
-namespace
-{
-vector<StackLayoutGenerator::StackTooDeep> findStackTooDeep(Stack const& _source, Stack const& _target)
-{
-	Stack currentStack = _source;
-	vector<StackLayoutGenerator::StackTooDeep> stackTooDeepErrors;
-	auto getVariableChoices = [](auto&& range) {
-		set<YulString> result;
-		for (auto const& slot: range)
-			if (auto const* variableSlot = get_if<VariableSlot>(&slot))
-				result.insert(variableSlot->variable.get().name);
-		return result;
-	};
-	::createStackLayout(currentStack, _target, [&](unsigned _i) {
-		if (_i > 16)
-			stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
-				_i - 16,
-				getVariableChoices(currentStack | ranges::views::take_last(_i + 1))
-			});
-	}, [&](StackSlot const& _slot) {
-		if (canBeFreelyGenerated(_slot))
-			return;
-		if (
-			auto depth = util::findOffset(currentStack | ranges::views::reverse, _slot);
-			depth && *depth >= 16
-		)
-			stackTooDeepErrors.emplace_back(StackLayoutGenerator::StackTooDeep{
-				*depth - 15,
-				getVariableChoices(currentStack | ranges::views::take_last(*depth + 1))
-			});
-	}, [&]() {});
-	return stackTooDeepErrors;
-
-}
-}
-
 vector<StackLayoutGenerator::StackTooDeep> StackLayoutGenerator::reportStackTooDeep(CFG::BasicBlock const& _entry)
 {
 	vector<StackLayoutGenerator::StackTooDeep> stackTooDeepErrors;
diff --git a/libyul/backends/evm/StackLayoutGenerator.h b/libyul/backends/evm/StackLayoutGenerator.h
index 2cd2d66ab..f3eba3a18 100644
--- a/libyul/backends/evm/StackLayoutGenerator.h
+++ b/libyul/backends/evm/StackLayoutGenerator.h
@@ -65,7 +65,7 @@ private:
 	/// @returns the optimal entry stack layout, s.t. @a _operation can be applied to it and
 	/// the result can be transformed to @a _exitStack with minimal stack shuffling.
 	/// Simultaneously stores the entry layout required for executing the operation in m_layout.
-	Stack propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation);
+	Stack propagateStackThroughOperation(Stack _exitStack, CFG::Operation const& _operation, bool _aggressiveStackCompression = false);
 
 	/// @returns the desired stack layout at the entry of @a _block, assuming the layout after
 	/// executing the block should be @a _exitStack.
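
Side note on the thresholds in findStackTooDeep: the EVM can only reach the topmost 16 stack slots with DUP/SWAP, so a slot needed at depth 16 or more (depth 0 being the stack top) cannot be accessed unless it can be regenerated on the fly; that is where the `_i > 16` and `*depth >= 16` checks come from. Below is a simplified, standalone sketch of that depth check only, with std::string slots and a plain loop standing in for the real StackSlot variant and util::findOffset; it is an illustration, not the patched code.

    #include <cstddef>
    #include <optional>
    #include <string>
    #include <vector>

    // Simplified stack model: strings stand in for libyul's StackSlot variant.
    using Stack = std::vector<std::string>;

    // Returns by how many slots `slot` is out of reach if it sits deeper than
    // the EVM's DUP16/SWAP16 limit; nullopt if reachable or not on the stack.
    std::optional<std::size_t> slotsOutOfReach(Stack const& stack, std::string const& slot)
    {
        for (std::size_t depth = 0; depth < stack.size(); ++depth)
            // Depth 0 is the stack top; DUP16 reaches depth 15, so >= 16 is too deep.
            if (stack[stack.size() - 1 - depth] == slot)
                return depth >= 16 ? std::optional<std::size_t>(depth - 15) : std::nullopt;
        return std::nullopt;
    }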
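The main behavioural change is in propagateStackThroughBlock: the block is first laid out without compression, and only if that layout produces stack-too-deep errors is the whole block redone with _aggressiveStackCompression enabled, i.e. slots that canBeFreelyGenerated are dropped from the layout and regenerated on demand, trading code size and gas for stack depth. A minimal sketch of that retry pattern follows, using hypothetical stand-in types (Operation, Stack of strings, placeholder propagate/detect helpers) rather than the real CFG::Operation and shuffling logic.

    #include <cstddef>
    #include <string>
    #include <utility>
    #include <vector>

    // Hypothetical stand-ins; they only model what the retry pattern needs.
    struct Operation { std::string name; };
    using Stack = std::vector<std::string>;
    struct StackTooDeep { std::size_t deficit; };

    // Backwards propagation through one operation. With `aggressive` set, slots
    // that could be regenerated on the fly are dropped from the layout.
    Stack propagateThroughOperation(Stack exitStack, Operation const&, bool aggressive)
    {
        if (aggressive && !exitStack.empty())
            exitStack.pop_back(); // placeholder for compressing freely generatable slots
        return exitStack;
    }

    // Placeholder detection mirroring the DUP16/SWAP16 reachability limit.
    std::vector<StackTooDeep> findTooDeep(Stack const& stack)
    {
        if (stack.size() > 16)
            return {StackTooDeep{stack.size() - 16}};
        return {};
    }

    // Two-pass propagation through a block: try the cheap layout first, and only
    // redo the block with aggressive compression if stack-too-deep errors showed up.
    Stack propagateThroughBlock(Stack const& exitStack, std::vector<Operation> const& ops)
    {
        std::vector<StackTooDeep> errors;
        Stack stack = exitStack;
        for (auto it = ops.rbegin(); it != ops.rend(); ++it)
        {
            Stack newStack = propagateThroughOperation(stack, *it, false);
            auto found = findTooDeep(newStack);
            errors.insert(errors.end(), found.begin(), found.end());
            stack = std::move(newStack);
        }
        if (!errors.empty())
        {
            // Second pass: accept worse code size and gas to reduce stack pressure.
            stack = exitStack;
            for (auto it = ops.rbegin(); it != ops.rend(); ++it)
                stack = propagateThroughOperation(stack, *it, true);
        }
        return stack;
    }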