ethereum/solidity (mirror: https://github.com/ethereum/solidity)
commit 9ed428912c, parent 078d1ef39f — commit message: "tmp"
@@ -400,6 +400,266 @@ void StackLayoutGenerator::processEntryPoint(CFG::BasicBlock const& _entry)
	}

	stitchConditionalJumps(_entry);
	std::map<CFG::BasicBlock const*, bool> terminates;
	fillInJunk(_entry, terminates);
}

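// WIP junk insertion: pads the layouts of blocks that cannot reach a function return with
// JunkSlots, so that predecessors can leave surplus stack slots in place instead of popping
// them. The _terminates argument is unused for now; an earlier recursive variant of the pass
// is kept disabled under "#if 0" further down.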
bool StackLayoutGenerator::fillInJunk(CFG::BasicBlock const& _entry, std::map<CFG::BasicBlock const*, bool>& _terminates)
{
	(void)_terminates;

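	// Determine which blocks still need a clean stack on exit: seed a reverse traversal with
	// all blocks that end in a function return and walk the `entries` edges backwards.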
	util::BreadthFirstSearch<CFG::BasicBlock const*> reverseVisit;
	util::BreadthFirstSearch<CFG::BasicBlock const*>{{&_entry}}.run([&](CFG::BasicBlock const* _block, auto _addChild){
		std::visit(util::GenericVisitor{
			[&](CFG::BasicBlock::MainExit const&) {},
			[&](CFG::BasicBlock::Jump const& _jump) { _addChild(_jump.target); },
			[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump)
			{
				_addChild(_conditionalJump.zero);
				_addChild(_conditionalJump.nonZero);
			},
			[&](CFG::BasicBlock::FunctionReturn const&)
			{
				reverseVisit.verticesToTraverse.emplace_back(_block);
			},
			[&](CFG::BasicBlock::Terminated const&) {},
		}, _block->exit);
	});
	reverseVisit.run([&](CFG::BasicBlock const* _block, auto _addChild) {
		for (auto const* entry: _block->entries)
			_addChild(entry);
	});

	set<CFG::BasicBlock const*> needsCleanStackOnExit = move(reverseVisit.visited);

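	// Walk the control flow forward: wherever a block does not need a clean stack on exit,
	// pad its operation entry layouts, its exit layout and the entry layouts of its jump
	// targets with JunkSlots, so that surplus slots are carried along instead of popped.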
	auto addJunkVisit = [&, seen = set<CFG::BasicBlock const*>{}](CFG::BasicBlock const& _block, auto& _recurse) mutable -> void {
		if (seen.count(&_block))
			return;
		seen.insert(&_block);

		if (!needsCleanStackOnExit.count(&_block))
		{
			Stack const& blockEntry = m_layout.blockInfos.at(&_block).entryLayout;
			size_t nextLayoutSize = _block.operations.empty() ?
				m_layout.blockInfos.at(&_block).exitLayout.size() :
				m_layout.operationEntryLayout.at(&_block.operations.front()).size();
			if (nextLayoutSize < blockEntry.size())
			{
				size_t numJunk = blockEntry.size() - nextLayoutSize;
				for (auto const& operation: _block.operations)
				{
					Stack& operationLayout = m_layout.operationEntryLayout.at(&operation);
					operationLayout = Stack{numJunk, JunkSlot{}} + move(operationLayout);
				}
				Stack& exitLayout = m_layout.blockInfos.at(&_block).exitLayout;
				exitLayout = Stack{numJunk, JunkSlot{}} + move(exitLayout);
			}
		}
		Stack const& exitLayout = m_layout.blockInfos.at(&_block).exitLayout;

		std::visit(util::GenericVisitor{
			[&](CFG::BasicBlock::MainExit const&)
			{
			},
			[&](CFG::BasicBlock::Jump const& _jump)
			{
				if (!_jump.backwards)
				{
					if (!needsCleanStackOnExit.count(_jump.target))
					{
						Stack& entryLayout = m_layout.blockInfos.at(_jump.target).entryLayout;
						if (entryLayout.size() < exitLayout.size())
							entryLayout =
								Stack{exitLayout.size() - entryLayout.size(), JunkSlot{}} + move(entryLayout);
					}
					_recurse(*_jump.target, _recurse);
				}
			},
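			// Both branch targets receive the same junk prefix; the comparison uses
			// exitLayout.size() - 1 since the conditional jump consumes the condition slot.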
			[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump)
			{
				if (!needsCleanStackOnExit.count(_conditionalJump.zero) && !needsCleanStackOnExit.count(_conditionalJump.nonZero))
				{
					Stack& zeroEntryLayout = m_layout.blockInfos.at(_conditionalJump.zero).entryLayout;
					size_t zeroLayoutDiff = 0;
					if (zeroEntryLayout.size() < exitLayout.size() - 1)
						zeroLayoutDiff = exitLayout.size() - 1 - zeroEntryLayout.size();
					Stack& nonZeroEntryLayout = m_layout.blockInfos.at(_conditionalJump.nonZero).entryLayout;

					yulAssert(nonZeroEntryLayout.size() == zeroEntryLayout.size(), "");

					zeroEntryLayout = Stack{zeroLayoutDiff, JunkSlot{}} + move(zeroEntryLayout);
					nonZeroEntryLayout = Stack{zeroLayoutDiff, JunkSlot{}} + move(nonZeroEntryLayout);
				}
				_recurse(*_conditionalJump.zero, _recurse);
				_recurse(*_conditionalJump.nonZero, _recurse);
			},
			[&](CFG::BasicBlock::FunctionReturn const&)
			{
			},
			[&](CFG::BasicBlock::Terminated const&)
			{
			},
		}, _block.exit);
	};
	addJunkVisit(_entry, addJunkVisit);

	return true;

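	// What follows is an earlier, termination-aware variant of this pass (the only user of
	// _terminates); it is disabled via #if 0 but left in place by this WIP commit.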
#if 0

	if (bool const* knownTerminates = util::valueOrNullptr(_terminates, &_block))
		return *knownTerminates;

	_terminates[&_block] = false;

	bool terminates = std::visit(util::GenericVisitor{
		[&](CFG::BasicBlock::MainExit const&) -> bool
		{
			return true;
		},
		[&](CFG::BasicBlock::Jump const& _jump) -> bool
		{
			if (_jump.backwards)
				return false;
			return fillInJunk(*_jump.target, _terminates);
		},
		[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump) -> bool
		{
			return fillInJunk(*_conditionalJump.zero, _terminates) && fillInJunk(*_conditionalJump.nonZero, _terminates);
		},
		[&](CFG::BasicBlock::FunctionReturn const&) -> bool
		{
			return false;
		},
		[&](CFG::BasicBlock::Terminated const&) -> bool
		{
			return true;
		},
	}, _block.exit);
	_terminates[&_block] = terminates;

	if (terminates)
	{
		auto const& entryLayout = m_layout.blockInfos.at(&_block).entryLayout;

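		// Cost heuristic: count the shuffle operations needed to turn the block's entry
		// layout into _candidate, heavily penalizing swaps and dups that would exceed the
		// EVM's 16-slot reach.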
		auto evaluate = [&](Stack const& _candidate) -> size_t {
			size_t numOps = 0;

			auto swap = [&](unsigned _swapDepth)
			{
				++numOps;
				if (_swapDepth > 16) numOps += 1000;
			};
			auto dupOrPush = [&](StackSlot const& _slot)
			{
				++numOps;
				auto depth = util::findOffset(entryLayout | ranges::views::reverse, _slot);
				if (depth && *depth >= 16)
					numOps += 1000;
			};
			auto pop = [&]() { numOps++; };
			Stack testStack = entryLayout;
			createStackLayout(testStack, _candidate, swap, dupOrPush, pop);
			return numOps;
		};
		/*
		size_t maxEntrySize = 0;
		for (auto entry: _block.entries)
		{
			auto const& exitLayout = m_layout.blockInfos.at(entry).exitLayout;
			if (exitLayout.size() > maxEntrySize)
				maxEntrySize = exitLayout.size();
		}

		auto evaluate = [&](Stack const& _candidate) -> size_t {
			size_t numOps = 0;
			for (auto entry: _block.entries)
			{
				Stack entryExitLayout = m_layout.blockInfos.at(entry).exitLayout;
				auto swap = [&](unsigned _swapDepth)
				{
					++numOps;
					if (_swapDepth > 16) numOps += 1000;
				};
				auto dupOrPush = [&](StackSlot const& _slot)
				{
					++numOps;
					auto depth = util::findOffset(entryExitLayout | ranges::views::reverse, _slot);
					if (depth && *depth >= 16)
						numOps += 1000;
				};
				auto pop = [&]() { numOps++; };
				createStackLayout(entryExitLayout, _candidate, swap, dupOrPush, pop);
			}
			return numOps;
		};*/

		if (_block.operations.empty())
			return terminates;

		Stack candidate = m_layout.operationEntryLayout.at(&_block.operations.front());

		size_t bestCost = evaluate(candidate);
		Stack bestCandidate = candidate;
		while (entryLayout.size() > candidate.size())
		{
			candidate.insert(candidate.begin(), JunkSlot{});
			size_t cost = evaluate(candidate);
			if (cost < bestCost)
			{
				bestCost = cost;
				bestCandidate = candidate;
			}
		}

		size_t junkToAdd = bestCandidate.size() - m_layout.operationEntryLayout.at(&_block.operations.front()).size();
		if (junkToAdd)
		{
			util::BreadthFirstSearch<CFG::BasicBlock const*>{{&_block}}.run([&](CFG::BasicBlock const* currentBlock, auto _addChild){
				for (auto& operation: currentBlock->operations)
				{
					Stack& operationLayout = m_layout.operationEntryLayout.at(&operation);
					operationLayout = Stack{junkToAdd, JunkSlot{}} + move(operationLayout);
				}
				Stack& blockExit = m_layout.blockInfos.at(currentBlock).exitLayout;
				blockExit = Stack{junkToAdd, JunkSlot{}} + move(blockExit);
				std::visit(util::GenericVisitor{
					[&](CFG::BasicBlock::MainExit const&)
					{
					},
					[&](CFG::BasicBlock::Jump const& _jump)
					{
						if (!_jump.backwards)
						{
							Stack& blockEntry = m_layout.blockInfos.at(_jump.target).entryLayout;
							blockEntry = Stack{junkToAdd, JunkSlot{}} + move(blockEntry);
							_addChild(_jump.target);
						}
					},
					[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump)
					{
						Stack& zeroEntry = m_layout.blockInfos.at(_conditionalJump.zero).entryLayout;
						zeroEntry = Stack{junkToAdd, JunkSlot{}} + move(zeroEntry);
						Stack& nonZeroEntry = m_layout.blockInfos.at(_conditionalJump.nonZero).entryLayout;
						nonZeroEntry = Stack{junkToAdd, JunkSlot{}} + move(nonZeroEntry);
						_addChild(_conditionalJump.zero);
						_addChild(_conditionalJump.nonZero);
					},
					[&](CFG::BasicBlock::FunctionReturn const&)
					{
						yulAssert(false, "");
					},
					[&](CFG::BasicBlock::Terminated const&)
					{
					},
				}, currentBlock->exit);
			});
		}
	}
	return terminates;
#endif
}

optional<Stack> StackLayoutGenerator::getExitLayoutOrStageDependencies(
@@ -504,6 +764,7 @@ void StackLayoutGenerator::stitchConditionalJumps(CFG::BasicBlock const& _block)
 			[&](CFG::BasicBlock::MainExit const&) {},
 			[&](CFG::BasicBlock::Jump const& _jump)
 			{
+				info.exitLayout = m_layout.blockInfos.at(_jump.target).entryLayout;
 				if (!_jump.backwards)
 					_addChild(_jump.target);
 			},
@@ -703,3 +964,39 @@ Stack StackLayoutGenerator::compressStack(Stack _stack)
	while (firstDupOffset);
	return _stack;
}
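/// Recursively prepends @a _numJunk junk slots to the operation entry layouts and the exit
/// layout of @a _block and to the entry layouts of its successors, following the control
/// flow forward but not descending into backwards jump targets.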
void StackLayoutGenerator::addJunkRecursive(CFG::BasicBlock const& _block, size_t _numJunk, std::set<CFG::BasicBlock const*>& _seen)
{
	if (_seen.count(&_block))
		return;
	_seen.insert(&_block);

	for (auto const& operation: _block.operations)
	{
		Stack& operationLayout = m_layout.operationEntryLayout.at(&operation);
		operationLayout = Stack{_numJunk, JunkSlot{}} + move(operationLayout);
	}
	Stack& exitLayout = m_layout.blockInfos.at(&_block).exitLayout;
	exitLayout = Stack{_numJunk, JunkSlot{}} + move(exitLayout);

	std::visit(util::GenericVisitor{
		[&](CFG::BasicBlock::MainExit const&) {},
		[&](CFG::BasicBlock::Jump const& _jump)
		{
			Stack& entryLayout = m_layout.blockInfos.at(_jump.target).entryLayout;
			entryLayout = Stack{_numJunk, JunkSlot{}} + move(entryLayout);
			if (!_jump.backwards)
				addJunkRecursive(*_jump.target, _numJunk, _seen);
		},
		[&](CFG::BasicBlock::ConditionalJump const& _conditionalJump)
		{
			Stack& zeroEntryLayout = m_layout.blockInfos.at(_conditionalJump.zero).entryLayout;
			zeroEntryLayout = Stack{_numJunk, JunkSlot{}} + move(zeroEntryLayout);
			Stack& nonZeroEntryLayout = m_layout.blockInfos.at(_conditionalJump.nonZero).entryLayout;
			nonZeroEntryLayout = Stack{_numJunk, JunkSlot{}} + move(nonZeroEntryLayout);
			addJunkRecursive(*_conditionalJump.zero, _numJunk, _seen);
			addJunkRecursive(*_conditionalJump.nonZero, _numJunk, _seen);
		},
		[&](CFG::BasicBlock::FunctionReturn const&) { yulAssert(false, ""); },
		[&](CFG::BasicBlock::Terminated const&) {},
	}, _block.exit);
}
@@ -98,6 +98,8 @@ private:
	/// exactly, except that slots not required after the jump are marked as `JunkSlot`s.
	void stitchConditionalJumps(CFG::BasicBlock const& _block);

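	/// Prepends junk slots to the layouts of blocks that do not need a clean stack on exit,
	/// so that surplus values can be carried along as junk instead of being popped.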
	bool fillInJunk(CFG::BasicBlock const& _block, std::map<CFG::BasicBlock const*, bool>& _terminates);

	/// Calculates the ideal stack layout, s.t. both @a _stack1 and @a _stack2 can be achieved with minimal
	/// stack shuffling when starting from the returned layout.
	static Stack combineStack(Stack const& _stack1, Stack const& _stack2);
@@ -111,6 +113,8 @@ private:
	/// stack @a _stack.
	static Stack compressStack(Stack _stack);

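	/// Recursively prepends @a _numJunk junk slots to the layouts of @a _block and of all
	/// blocks reachable from it via forward control flow; @a _seen prevents revisiting.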
	void addJunkRecursive(CFG::BasicBlock const& _block, size_t _numJunk, std::set<CFG::BasicBlock const*>& _seen);

	StackLayout& m_layout;
};
@@ -162,7 +162,7 @@
 // [ c RET b a ]\l\
 // sstore\l\
 // [ c RET ]\l\
-// [ c RET ]\l\
+// [ c RET JUNK JUNK JUNK ]\l\
 // "];
 // Block8 -> Block8Exit [arrowhead=none];
 // Block8Exit [label="Jump" shape=oval];
@@ -296,7 +296,7 @@
 // [ c RET x b a 0xffff 0xff ]\l\
 // sstore\l\
 // [ c RET x b a ]\l\
-// [ c RET x b a ]\l\
+// [ c RET a b x ]\l\
 // "];
 // Block19 -> Block19Exit [arrowhead=none];
 // Block19Exit [label="BackwardsJump" shape=oval];
@@ -85,21 +85,21 @@
 //
 // Block4 [label="\
 // [ JUNK JUNK ]\l\
-// [ 0x42 ]\l\
+// [ JUNK 0x42 ]\l\
 // Assignment(y)\l\
-// [ y ]\l\
-// [ y ]\l\
+// [ JUNK y ]\l\
+// [ JUNK y ]\l\
 // "];
 // Block4 -> Block4Exit [arrowhead=none];
 // Block4Exit [label="Jump" shape=oval];
 // Block4Exit -> Block5;
 //
 // Block5 [label="\
-// [ y ]\l\
-// [ y 0x0404 ]\l\
+// [ JUNK y ]\l\
+// [ JUNK y 0x0404 ]\l\
 // sstore\l\
-// [ ]\l\
-// [ ]\l\
+// [ JUNK ]\l\
+// [ JUNK ]\l\
 // "];
 // Block5Exit [label="MainExit"];
 // Block5 -> Block5Exit;
test/libyul/yulStackLayout/weird_popping.yul (new file, 16 lines)
@@ -0,0 +1,16 @@
{
    let x_1 := calldataload(0)
    let x_2 := calldataload(1)
    let x_3 := calldataload(2)
    let x_4 := calldataload(3)
    let x_5 := calldataload(4)
    if lt(x_1, x_2) {
        let z := 42
        if lt(calldataload(5), calldataload(6)) {
            z := mul(z, 2)
        }
        revert(z, z)
    }
    sstore(add(x_1, add(x_2, add(x_3, add(x_4, add(x_5, 0))))), 42)
}
// ----