diff --git a/libsolutil/CommonData.h b/libsolutil/CommonData.h
index 8233a9f96..24fa67daa 100644
--- a/libsolutil/CommonData.h
+++ b/libsolutil/CommonData.h
@@ -340,7 +340,7 @@ namespace detail
 {
 
 template<typename Container, typename Value>
-auto findOffset(Container&& _container, Value&& _value, int)
+auto findOffset(Container&& _container, Value&& _value, int)
 -> decltype(_container.find(_value) == _container.end(), std::optional<size_t>())
 {
 	auto it = _container.find(std::forward<Value>(_value));
@@ -363,6 +363,10 @@ auto findOffset(Range&& _range, Value&& _value, void*)
 
 }
 
+/// @returns an std::optional<size_t> containing the offset of the first element in @a _range that is equal to @a _value,
+/// if any, or std::nullopt otherwise.
+/// Uses a linear search (``std::find``) unless @a _range is a container and provides a
+/// suitable ``.find`` function (e.g. it will use the logarithmic ``.find`` function in ``std::set`` instead).
 template<typename Range>
 auto findOffset(Range&& _range, std::remove_reference_t<decltype(*std::begin(_range))> const& _value)
 -> decltype(detail::findOffset(std::forward<Range>(_range), _value, 0))
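As a usage sketch of the documented behaviour, the following self-contained program mimics the ``int``/``void*`` overload selection shown above. It is an illustration only, not the actual libsolutil implementation; the wrapper signature is simplified and the function bodies are assumptions based on the lines visible in this hunk.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <iterator>
#include <optional>
#include <set>
#include <utility>
#include <vector>

namespace detail
{

// Preferred overload: chosen whenever the container provides a member ``find``.
template<typename Container, typename Value>
auto findOffset(Container&& _container, Value&& _value, int)
-> decltype(_container.find(_value) == _container.end(), std::optional<std::size_t>())
{
	auto it = _container.find(std::forward<Value>(_value));
	if (it == _container.end())
		return std::nullopt;
	return static_cast<std::size_t>(std::distance(_container.begin(), it));
}

// Fallback overload: linear search via ``std::find``.
template<typename Range, typename Value>
auto findOffset(Range&& _range, Value&& _value, void*) -> std::optional<std::size_t>
{
	auto it = std::find(std::begin(_range), std::end(_range), _value);
	if (it == std::end(_range))
		return std::nullopt;
	return static_cast<std::size_t>(std::distance(std::begin(_range), it));
}

}

// Simplified wrapper: passing 0 prefers the ``int`` overload; if SFINAE rejects it
// (no member ``find``), overload resolution falls back to the ``void*`` one.
template<typename Range, typename Value>
std::optional<std::size_t> findOffset(Range&& _range, Value const& _value)
{
	return detail::findOffset(std::forward<Range>(_range), _value, 0);
}

int main()
{
	std::vector<int> v{10, 20, 30};
	std::set<int> s{10, 20, 30};
	std::cout << *findOffset(v, 30) << '\n';             // 2, via linear std::find
	std::cout << *findOffset(s, 30) << '\n';             // 2, via std::set::find
	std::cout << findOffset(v, 42).has_value() << '\n';  // 0, i.e. std::nullopt
}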
diff --git a/libyul/backends/evm/StackHelpers.h b/libyul/backends/evm/StackHelpers.h
index 7e5649499..fb5602aba 100644
--- a/libyul/backends/evm/StackHelpers.h
+++ b/libyul/backends/evm/StackHelpers.h
@@ -53,25 +53,68 @@ inline std::string stackToString(Stack const& _stack)
 	return result;
 }
 
+
+// Abstraction of stack shuffling operations. Can be defined as an actual concept once we switch to C++20.
+/*
+template<typename ShuffleOperations>
+concept ShuffleOperationConcept = requires(ShuffleOperations ops, size_t sourceOffset, size_t targetOffset, size_t depth) {
+	// Returns true, iff the current slot at sourceOffset in the source layout is a suitable slot at targetOffset.
+	{ ops.isCompatible(sourceOffset, targetOffset) } -> std::convertible_to<bool>;
+	// Returns true, iff the slots at the two given source offsets are identical.
+	{ ops.sourceIsSame(sourceOffset, sourceOffset) } -> std::convertible_to<bool>;
+	// Returns a positive integer n, if the slot at the given source offset needs n more copies.
+	// Returns a negative integer -n, if the slot at the given source offset occurs n times too often.
+	// Returns zero, if the number of occurrences of the slot at the given source offset in the current source layout matches the desired number of occurrences in the target.
+	{ ops.sourceMultiplicity(sourceOffset) } -> std::convertible_to<int>;
+	// Returns a positive integer n, if the slot at the given target offset needs n more copies.
+	// Returns a negative integer -n, if the slot at the given target offset occurs n times too often.
+	// Returns zero, if the number of occurrences of the slot at the given target offset in the current source layout matches the desired number of occurrences in the target.
+	{ ops.targetMultiplicity(targetOffset) } -> std::convertible_to<int>;
+	// Returns true, iff any slot is compatible with the given target offset.
+	{ ops.targetIsArbitrary(targetOffset) } -> std::convertible_to<bool>;
+	// Returns the number of slots in the source layout.
+	{ ops.sourceSize() } -> std::convertible_to<size_t>;
+	// Returns the number of slots in the target layout.
+	{ ops.targetSize() } -> std::convertible_to<size_t>;
+	// Swaps the topmost slot in the source with the slot at the given depth.
+	{ ops.swap(depth) };
+	// Pops the topmost slot in the source.
+	{ ops.pop() };
+	// Dups or pushes the slot that is supposed to end up at the given target offset.
+	{ ops.pushOrDupTarget(targetOffset) };
+};
+*/
+/// Helper class that can perform shuffling of a source stack layout to a target stack layout via
+/// abstracted shuffle operations.
+template<typename ShuffleOperations>
 class Shuffler
 {
 public:
+	/// Executes the stack shuffling operations. Constructs an instance of ShuffleOperations
+	/// in each iteration. Each iteration performs exactly one operation that modifies the stack.
+	/// After shuffling, all slots in the source layout are guaranteed to be compatible with the slots
+	/// at the same target offset, but there may be additional slots in the target that have not been
+	/// pushed or dupped yet.
 	template<typename... Args>
 	static void shuffle(Args&&... args)
 	{
 		bool needsMoreShuffling = true;
+		// The shuffling algorithm should always terminate in polynomial time, but we provide a limit
+		// in case it does not terminate due to a bug.
 		size_t iterationCount = 0;
 		while (iterationCount < 1000 && (needsMoreShuffling = shuffleStep(std::forward<Args>(args)...)))
 			++iterationCount;
 		yulAssert(!needsMoreShuffling, "Could not create stack layout after 1000 iterations.");
 	}
 private:
+	/// Performs a single stack operation, transforming the source layout closer to the target layout.
 	template<typename... Args>
 	static bool shuffleStep(Args&&... args)
 	{
 		ShuffleOperations ops{std::forward<Args>(args)...};
+		// Terminates if all slots in the source are compatible with the target.
+		// Note that there may still be more slots in the target.
 		if (ranges::all_of(
 			ranges::views::iota(0u, ops.sourceSize()),
 			[&](size_t _index) { return ops.isCompatible(_index, _index); }
@@ -195,6 +238,9 @@ private:
 	}
 };
 
+
+/// Transforms @a _currentStack to @a _targetStack, invoking the provided shuffling operations.
+/// Modifies @a _currentStack itself after each invocation of the shuffling operations.
 template<typename Swap, typename PushOrDup, typename Pop>
 void createStackLayout(Stack& _currentStack, Stack const& _targetStack, Swap _swap, PushOrDup _pushOrDup, Pop _pop)
 {
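To make the commented-out ShuffleOperationConcept more concrete, here is a hypothetical toy type over ``std::vector<std::string>`` stacks that provides the required members. It only illustrates the interface shape; the real ShuffleOperations used by the EVM code transform operate on Stack/StackSlot and are defined elsewhere in the backend. With such a type, a call like ``Shuffler<ToyShuffleOperations>::shuffle(source, target)`` would construct one instance per iteration, as described in the doc comment on ``shuffle``.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical example type; not the ShuffleOperations used by the actual code generator.
struct ToyShuffleOperations
{
	std::vector<std::string>& source;
	std::vector<std::string> const& target;

	// A source slot fits a target offset iff the entries are equal.
	bool isCompatible(std::size_t _source, std::size_t _target) const
	{
		return _target < target.size() && source.at(_source) == target.at(_target);
	}
	bool sourceIsSame(std::size_t _lhs, std::size_t _rhs) const { return source.at(_lhs) == source.at(_rhs); }
	// Positive: the slot needs more copies; negative: it occurs too often; zero: counts match.
	int sourceMultiplicity(std::size_t _offset) const
	{
		return static_cast<int>(std::count(target.begin(), target.end(), source.at(_offset)))
			- static_cast<int>(std::count(source.begin(), source.end(), source.at(_offset)));
	}
	int targetMultiplicity(std::size_t _offset) const
	{
		return static_cast<int>(std::count(target.begin(), target.end(), target.at(_offset)))
			- static_cast<int>(std::count(source.begin(), source.end(), target.at(_offset)));
	}
	bool targetIsArbitrary(std::size_t) const { return false; }
	std::size_t sourceSize() const { return source.size(); }
	std::size_t targetSize() const { return target.size(); }
	// Swaps the topmost slot with the slot _depth positions below it (assumed depth convention).
	void swap(std::size_t _depth) { std::swap(source.back(), source.at(source.size() - 1 - _depth)); }
	void pop() { source.pop_back(); }
	void pushOrDupTarget(std::size_t _offset) { source.push_back(target.at(_offset)); }
};

int main()
{
	std::vector<std::string> source{"a", "b", "b"};
	std::vector<std::string> const target{"b", "a"};
	ToyShuffleOperations ops{source, target};
	std::cout << ops.isCompatible(0, 1) << '\n';      // 1: source slot "a" fits target offset 1
	std::cout << ops.sourceMultiplicity(1) << '\n';   // -1: one "b" too many in the source
}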
diff --git a/libyul/backends/evm/StackLayoutGenerator.cpp b/libyul/backends/evm/StackLayoutGenerator.cpp
index ee7c5de5d..03b02bc0b 100644
--- a/libyul/backends/evm/StackLayoutGenerator.cpp
+++ b/libyul/backends/evm/StackLayoutGenerator.cpp
@@ -44,7 +44,9 @@ using namespace solidity;
 using namespace solidity::yul;
 using namespace std;
 
-StackLayoutGenerator::StackLayoutGenerator(StackLayout& _layout): m_layout(_layout)
+StackLayoutGenerator::StackLayoutGenerator(StackLayout& _layout, vector<VariableSlot> _currentFunctionReturnVariables):
+	m_layout(_layout),
+	m_currentFunctionReturnVariables(move(_currentFunctionReturnVariables))
 {
 }
 
@@ -265,43 +267,49 @@ list<pair<CFG::BasicBlock const*, CFG::BasicBlock const*>> StackLayoutGenerator::collectBackwardsJumps(CFG::BasicBlock const& _entry) const
 
 optional<Stack> StackLayoutGenerator::getExitLayoutOrStageDependencies(
 	CFG::BasicBlock const& _block,
-	set<CFG::BasicBlock const*> const& _blocksWithExitLayout,
+	set<CFG::BasicBlock const*> const& _visited,
 	list<CFG::BasicBlock const*>& _toVisit
 ) const
 {
 	return std::visit(util::GenericVisitor{
 		[&](CFG::BasicBlock::MainExit const&) -> std::optional<Stack>
 		{
+			// On the exit of the outermost block the stack can be empty.
 			return Stack{};
 		},
 		[&](CFG::BasicBlock::Jump const& _jump) -> std::optional<Stack>
 		{
 			if (_jump.backwards)
 			{
+				// Choose the best currently known entry layout of the jump target as initial exit.
+				// Note that this may not yet be the final layout.
 				if (auto* info = util::valueOrNullptr(m_layout.blockInfos, _jump.target))
 					return info->entryLayout;
 				return Stack{};
 			}
-			if (_blocksWithExitLayout.count(_jump.target))
-			{
+			// If the current iteration has already visited the jump target, start from its entry layout.
+			if (_visited.count(_jump.target))
 				return m_layout.blockInfos.at(_jump.target).entryLayout;
-			}
+			// Otherwise stage the jump target for visit and defer the current block.
 			_toVisit.emplace_front(_jump.target);
 			return nullopt;
 		},
 		[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump) -> std::optional<Stack>
 		{
-			bool zeroVisited = _blocksWithExitLayout.count(_conditionalJump.zero);
-			bool nonZeroVisited = _blocksWithExitLayout.count(_conditionalJump.nonZero);
+			bool zeroVisited = _visited.count(_conditionalJump.zero);
+			bool nonZeroVisited = _visited.count(_conditionalJump.nonZero);
 			if (zeroVisited && nonZeroVisited)
 			{
+				// If the current iteration has already visited both jump targets, start from the combination of their entry layouts.
 				Stack stack = combineStack(
 					m_layout.blockInfos.at(_conditionalJump.zero).entryLayout,
 					m_layout.blockInfos.at(_conditionalJump.nonZero).entryLayout
 				);
+				// Additionally, the jump condition has to be at the stack top at exit.
 				stack.emplace_back(_conditionalJump.condition);
 				return stack;
 			}
+			// If one of the jump targets has not been visited, stage it for visit and defer the current block.
 			if (!zeroVisited)
 				_toVisit.emplace_front(_conditionalJump.zero);
 			if (!nonZeroVisited)
@@ -310,6 +318,7 @@ optional<Stack> StackLayoutGenerator::getExitLayoutOrStageDependencies(
 		},
 		[&](CFG::BasicBlock::FunctionReturn const& _functionReturn) -> std::optional<Stack>
 		{
+			// A function return needs the return variables and the function return label slot on the stack.
 			yulAssert(_functionReturn.info, "");
 			Stack stack = _functionReturn.info->returnVariables | ranges::views::transform([](auto const& _varSlot){
 				return StackSlot{_varSlot};
@@ -319,6 +328,7 @@ optional<Stack> StackLayoutGenerator::getExitLayoutOrStageDependencies(
 		},
 		[&](CFG::BasicBlock::Terminated const&) -> std::optional<Stack>
 		{
+			// A terminating block can have an empty stack on exit.
 			return Stack{};
 		},
 	}, _block.exit);
@@ -347,14 +357,6 @@ void StackLayoutGenerator::processEntryPoint(CFG::BasicBlock const& _entry)
 		if (std::optional<Stack> exitLayout = getExitLayoutOrStageDependencies(*block, visited, toVisit))
 		{
 			visited.emplace(block);
-			// We can skip the visit, if we have seen this precise exit layout already last time.
-			// Note: if the entire graph is revisited in the backwards jump check below, doing
-			// this seems to break things; not sure why.
-			// Note: since I don't quite understand why doing this can break things, I comment
-			// it out for now, since not aborting in those cases should always be safe.
-			// if (auto* previousInfo = util::valueOrNullptr(m_layout.blockInfos, block))
-			// 	if (previousInfo->exitLayout == *exitLayout)
-			// 		continue;
 			auto& info = m_layout.blockInfos[block];
 			info.exitLayout = *exitLayout;
 			info.entryLayout = propagateStackThroughBlock(info.exitLayout, *block);
@@ -367,23 +369,25 @@ void StackLayoutGenerator::processEntryPoint(CFG::BasicBlock const& _entry)
 	}
 
 	// Determine which backwards jumps still require fixing and stage revisits of appropriate nodes.
-	for (auto [block, target]: backwardsJumps)
+	for (auto [jumpingBlock, target]: backwardsJumps)
+		// This block jumps backwards, but does not provide all slots required by the jump target on exit.
+		// Therefore we need to visit the subgraph between ``target`` and ``jumpingBlock`` again.
 		if (ranges::any_of(
 			m_layout.blockInfos[target].entryLayout,
-			[exitLayout = m_layout.blockInfos[block].exitLayout](StackSlot const& _slot) {
+			[exitLayout = m_layout.blockInfos[jumpingBlock].exitLayout](StackSlot const& _slot) {
				return !util::findOffset(exitLayout, _slot);
 			}
 		))
 		{
-			// This block jumps backwards, but does not provide all slots required by the jump target on exit.
-			// Therefore we need to visit the subgraph between ``target`` and ``block`` again.
-			// In particular we can visit backwards starting from ``block`` and mark all entries to-be-visited-
+			// In particular we can visit backwards starting from ``jumpingBlock`` and mark all entries to-be-visited-
 			// again until we hit ``target``.
-			toVisit.emplace_front(block);
-			// Since we are likely to change the entry layout of ``target``, we also visit its entries again.
+			toVisit.emplace_front(jumpingBlock);
+			// Since we are likely to permute the entry layout of ``target``, we also visit its entries again.
+			// This is not required for correctness, since the set of stack slots will match, but it may move some
+			// required stack shuffling from the loop condition to outside the loop.
 			for (CFG::BasicBlock const* entry: target->entries)
 				visited.erase(entry);
-			util::BreadthFirstSearch<CFG::BasicBlock const*>{{block}}.run(
+			util::BreadthFirstSearch<CFG::BasicBlock const*>{{jumpingBlock}}.run(
 				[&visited, target = target](CFG::BasicBlock const* _block, auto _addChild) {
 					visited.erase(_block);
 					if (_block == target)
@@ -392,14 +396,11 @@ void StackLayoutGenerator::processEntryPoint(CFG::BasicBlock const& _entry)
 						_addChild(entry);
 				}
 			);
-			// TODO: while the above is enough, the layout of ``target`` might change in the process.
 			// While the shuffled layout for ``target`` will be compatible, it can be worthwhile propagating
 			// it further up once more.
-			// This would mean not stopping at _block == target above or even doing visited.clear() here, revisiting the entire graph.
+			// This would mean not stopping at _block == target above, or even doing visited.clear() here and revisiting the entire graph.
 			// This is a tradeoff between the runtime of this process and the optimality of the result.
 			// Also note that while visiting the entire graph again *can* be helpful, it can also be detrimental.
-			// Also note that for some reason using visited.clear() is incompatible with skipping the revisit
-			// of already seen exit layouts above, I'm not sure yet why.
 		}
 }
 
@@ -547,14 +548,10 @@ void StackLayoutGenerator::fixStackTooDeep(CFG::BasicBlock const&)
 StackLayout StackLayoutGenerator::run(CFG const& _cfg)
 {
 	StackLayout stackLayout;
-	StackLayoutGenerator stackLayoutGenerator{stackLayout};
+	StackLayoutGenerator{stackLayout, {}}.processEntryPoint(*_cfg.entry);
 
-	stackLayoutGenerator.processEntryPoint(*_cfg.entry);
 	for (auto& functionInfo: _cfg.functionInfo | ranges::views::values)
-	{
-		stackLayoutGenerator.m_currentFunctionReturnVariables = functionInfo.returnVariables;
-		stackLayoutGenerator.processEntryPoint(*functionInfo.entry);
-	}
+		StackLayoutGenerator{stackLayout, functionInfo.returnVariables}.processEntryPoint(*functionInfo.entry);
 
 	return stackLayout;
 }
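The revisit condition above amounts to: a backwards jump needs another pass iff some slot required in the target's entry layout cannot be found anywhere in the jumping block's exit layout. The following self-contained snippet illustrates just that check, with ``std::string`` standing in for StackSlot and ``std::any_of``/``std::find`` standing in for ``ranges::any_of``/``util::findOffset``.

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main()
{
	// Entry layout required by the jump target vs. exit layout provided by the jumping block.
	std::vector<std::string> targetEntryLayout{"i", "sum"};
	std::vector<std::string> jumpExitLayout{"sum", "tmp"}; // "i" is missing

	bool needsRevisit = std::any_of(
		targetEntryLayout.begin(),
		targetEntryLayout.end(),
		[&](std::string const& _slot) {
			return std::find(jumpExitLayout.begin(), jumpExitLayout.end(), _slot) == jumpExitLayout.end();
		}
	);
	// Prints "true": the subgraph between the target and the jumping block gets revisited.
	std::cout << std::boolalpha << needsRevisit << '\n';
}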
diff --git a/libyul/backends/evm/StackLayoutGenerator.h b/libyul/backends/evm/StackLayoutGenerator.h
index 1a6b3ec73..4555724eb 100644
--- a/libyul/backends/evm/StackLayoutGenerator.h
+++ b/libyul/backends/evm/StackLayoutGenerator.h
@@ -50,7 +50,7 @@ public:
 	static StackLayout run(CFG const& _cfg);
 
 private:
-	StackLayoutGenerator(StackLayout& _context);
+	StackLayoutGenerator(StackLayout& _context, std::vector<VariableSlot> _currentFunctionReturnVariables);
 
 	/// @returns the optimal entry stack layout, s.t. @a _operation can be applied to it and
 	/// the result can be transformed to @a _exitStack with minimal stack shuffling.
@@ -64,12 +64,15 @@ private:
 	/// Iteratively reruns itself along backwards jumps until the layout is stabilized.
 	void processEntryPoint(CFG::BasicBlock const& _entry);
 
+	/// @returns the best known exit layout of @a _block, if all dependencies are already contained in @a _visited.
+	/// If not, adds the dependencies to @a _dependencyList and returns std::nullopt.
 	std::optional<Stack> getExitLayoutOrStageDependencies(
 		CFG::BasicBlock const& _block,
-		std::set<CFG::BasicBlock const*> const& _blocksWithExitLayouts,
+		std::set<CFG::BasicBlock const*> const& _visited,
 		std::list<CFG::BasicBlock const*>& _dependencyList
 	) const;
 
+	/// @returns a list of ``{jumpingBlock, targetBlock}`` pairs, one for each backwards jump in the graph starting at @a _entry.
 	std::list<std::pair<CFG::BasicBlock const*, CFG::BasicBlock const*>> collectBackwardsJumps(CFG::BasicBlock const& _entry) const;
 
 	/// After the main algorithms, layouts at conditional jumps are merely compatible, i.e. the exit layout of the
@@ -86,10 +89,13 @@ private:
 	/// attempts to reorganize the layout to avoid those cases.
 	void fixStackTooDeep(CFG::BasicBlock const& _entry);
 
+	/// @returns a copy of @a _stack stripped of all duplicates and slots that can be freely generated.
+	/// Attempts to create a layout that requires a minimal number of operations to reconstruct the
+	/// original stack @a _stack.
 	static Stack compressStack(Stack _stack);
 
 	StackLayout& m_layout;
-	std::vector<VariableSlot> m_currentFunctionReturnVariables;
+	std::vector<VariableSlot> const m_currentFunctionReturnVariables;
 };
 
 }