mirror of https://github.com/ethereum/solidity (synced 2023-10-03 13:03:40 +00:00)
[yul-phaser] Selections+PairSelections: Add RandomSubset and PairsFromRandomSubset
parent 7381068dcc
commit b6f8ecf755

@@ -119,6 +119,78 @@ BOOST_AUTO_TEST_CASE(materialise_should_return_no_pairs_if_collection_has_one_el
    BOOST_TEST(RandomPairSelection(2.0).materialise(1).empty());
}

BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE(PairsFromRandomSubsetTest)

BOOST_AUTO_TEST_CASE(materialise_should_return_random_values_with_equal_probabilities)
{
    constexpr int collectionSize = 1000;
    constexpr double selectionChance = 0.7;
    constexpr double relativeTolerance = 0.001;
    constexpr double expectedValue = selectionChance;
    constexpr double variance = selectionChance * (1 - selectionChance);

    SimulationRNG::reset(1);
    vector<tuple<size_t, size_t>> pairs = PairsFromRandomSubset(selectionChance).materialise(collectionSize);
    vector<double> bernoulliTrials(collectionSize, 0);
    for (auto& pair: pairs)
    {
        BOOST_REQUIRE(get<0>(pair) < collectionSize);
        BOOST_REQUIRE(get<1>(pair) < collectionSize);
        bernoulliTrials[get<0>(pair)] = 1.0;
        bernoulliTrials[get<1>(pair)] = 1.0;
    }

    BOOST_TEST(abs(mean(bernoulliTrials) - expectedValue) < expectedValue * relativeTolerance);
    BOOST_TEST(abs(meanSquaredError(bernoulliTrials, expectedValue) - variance) < variance * relativeTolerance);
}

BOOST_AUTO_TEST_CASE(materialise_should_return_only_values_that_can_be_used_as_collection_indices)
{
    const size_t collectionSize = 200;
    constexpr double selectionChance = 0.5;

    vector<tuple<size_t, size_t>> pairs = PairsFromRandomSubset(selectionChance).materialise(collectionSize);

    BOOST_TEST(all_of(pairs.begin(), pairs.end(), [&](auto const& pair){ return get<0>(pair) < collectionSize; }));
    BOOST_TEST(all_of(pairs.begin(), pairs.end(), [&](auto const& pair){ return get<1>(pair) < collectionSize; }));
}

BOOST_AUTO_TEST_CASE(materialise_should_use_unique_indices)
{
    constexpr size_t collectionSize = 200;
    constexpr double selectionChance = 0.5;

    vector<tuple<size_t, size_t>> pairs = PairsFromRandomSubset(selectionChance).materialise(collectionSize);
    set<size_t> indices;
    for (auto& pair: pairs)
    {
        indices.insert(get<0>(pair));
        indices.insert(get<1>(pair));
    }

    BOOST_TEST(indices.size() == 2 * pairs.size());
}

BOOST_AUTO_TEST_CASE(materialise_should_return_no_indices_if_collection_is_empty)
{
    BOOST_TEST(PairsFromRandomSubset(0.0).materialise(0).empty());
    BOOST_TEST(PairsFromRandomSubset(0.5).materialise(0).empty());
    BOOST_TEST(PairsFromRandomSubset(1.0).materialise(0).empty());
}

BOOST_AUTO_TEST_CASE(materialise_should_return_no_pairs_if_selection_chance_is_zero)
{
    BOOST_TEST(PairsFromRandomSubset(0.0).materialise(0).empty());
    BOOST_TEST(PairsFromRandomSubset(0.0).materialise(100).empty());
}

BOOST_AUTO_TEST_CASE(materialise_should_return_all_pairs_if_selection_chance_is_one)
{
    BOOST_TEST(PairsFromRandomSubset(1.0).materialise(0).empty());
    BOOST_TEST(PairsFromRandomSubset(1.0).materialise(100).size() == 50);
}

BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE(PairMosaicSelectionTest)
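
A note on the statistical check above: each of the collectionSize slots is treated as an independent Bernoulli trial that succeeds (1.0) when its index ends up in some pair, so with selection chance p the sample mean should stay close to p and the mean squared error against p close to the variance p * (1 - p). The helpers mean() and meanSquaredError() are presumably provided by the yul-phaser test utilities; the sketch below re-implements them locally (hypothetical, not part of this commit) just to make the two tolerance assertions concrete.

// Hedged sketch, not part of the diff: local stand-ins for the statistical
// helpers used by the tests, showing what the tolerance assertions measure.
#include <cassert>
#include <cmath>
#include <numeric>
#include <vector>

double mean(std::vector<double> const& _samples)
{
    return std::accumulate(_samples.begin(), _samples.end(), 0.0) / _samples.size();
}

double meanSquaredError(std::vector<double> const& _samples, double _expectedValue)
{
    double sum = 0.0;
    for (double sample: _samples)
        sum += (sample - _expectedValue) * (sample - _expectedValue);
    return sum / _samples.size();
}

int main()
{
    // With p = 0.7 the toy data below has mean 0.7 and MSE 0.21 = 0.7 * 0.3,
    // i.e. exactly the expected value and variance of a Bernoulli(0.7) trial.
    double const p = 0.7;
    std::vector<double> trials{1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0};
    assert(std::abs(mean(trials) - p) < 0.001);
    assert(std::abs(meanSquaredError(trials, p) - p * (1 - p)) < 0.001);
}
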
@@ -25,9 +25,11 @@
#include <boost/test/unit_test.hpp>

#include <algorithm>
#include <set>
#include <vector>

using namespace std;
using namespace solidity::util;

namespace solidity::phaser::test
{
@@ -199,6 +201,60 @@ BOOST_AUTO_TEST_CASE(materialise_should_return_no_indices_if_collection_is_empty
    BOOST_TEST(RandomSelection(2.0).materialise(0).empty());
}

BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE(RandomSubsetTest)

BOOST_AUTO_TEST_CASE(materialise_should_return_random_values_with_equal_probabilities)
{
    constexpr int collectionSize = 1000;
    constexpr double selectionChance = 0.7;
    constexpr double relativeTolerance = 0.001;
    constexpr double expectedValue = selectionChance;
    constexpr double variance = selectionChance * (1 - selectionChance);

    SimulationRNG::reset(1);
    auto indices = convertContainer<set<size_t>>(RandomSubset(selectionChance).materialise(collectionSize));

    vector<double> bernoulliTrials(collectionSize);
    for (size_t i = 0; i < collectionSize; ++i)
        bernoulliTrials[i] = indices.count(i);

    BOOST_TEST(abs(mean(bernoulliTrials) - expectedValue) < expectedValue * relativeTolerance);
    BOOST_TEST(abs(meanSquaredError(bernoulliTrials, expectedValue) - variance) < variance * relativeTolerance);
}

BOOST_AUTO_TEST_CASE(materialise_should_return_only_values_that_can_be_used_as_collection_indices)
{
    const size_t collectionSize = 200;
    vector<size_t> indices = RandomSubset(0.5).materialise(collectionSize);

    BOOST_TEST(all_of(indices.begin(), indices.end(), [&](auto const& index){ return index < collectionSize; }));
}

BOOST_AUTO_TEST_CASE(materialise_should_return_indices_in_the_same_order_they_are_in_the_container)
{
    const size_t collectionSize = 200;
    vector<size_t> indices = RandomSubset(0.5).materialise(collectionSize);

    for (size_t i = 1; i < indices.size(); ++i)
        BOOST_TEST(indices[i - 1] < indices[i]);
}

BOOST_AUTO_TEST_CASE(materialise_should_return_no_indices_if_collection_is_empty)
{
    BOOST_TEST(RandomSubset(0.5).materialise(0).empty());
}

BOOST_AUTO_TEST_CASE(materialise_should_return_no_indices_if_selection_chance_is_zero)
{
    BOOST_TEST(RandomSubset(0.0).materialise(10).empty());
}

BOOST_AUTO_TEST_CASE(materialise_should_return_all_indices_if_selection_chance_is_one)
{
    BOOST_TEST(RandomSubset(1.0).materialise(10).size() == 10);
}

BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE_END()
@@ -17,6 +17,7 @@

#include <tools/yulPhaser/PairSelections.h>

#include <tools/yulPhaser/Selections.h>
#include <tools/yulPhaser/SimulationRNG.h>

#include <cmath>
@@ -47,6 +48,43 @@ vector<tuple<size_t, size_t>> RandomPairSelection::materialise(size_t _poolSize)
    return selection;
}

vector<tuple<size_t, size_t>> PairsFromRandomSubset::materialise(size_t _poolSize) const
{
    vector<size_t> selectedIndices = RandomSubset(m_selectionChance).materialise(_poolSize);

    if (selectedIndices.size() % 2 != 0)
    {
        if (selectedIndices.size() < _poolSize && SimulationRNG::bernoulliTrial(0.5))
        {
            do
            {
                size_t extraIndex = SimulationRNG::uniformInt(0, _poolSize - 1);
                if (find(selectedIndices.begin(), selectedIndices.end(), extraIndex) == selectedIndices.end())
                    selectedIndices.push_back(extraIndex);
            } while (selectedIndices.size() % 2 != 0);
        }
        else
            selectedIndices.erase(selectedIndices.begin() + SimulationRNG::uniformInt(0, selectedIndices.size() - 1));
    }
    assert(selectedIndices.size() % 2 == 0);

    vector<tuple<size_t, size_t>> selectedPairs;
    for (size_t i = selectedIndices.size() / 2; i > 0; --i)
    {
        size_t position1 = SimulationRNG::uniformInt(0, selectedIndices.size() - 1);
        size_t value1 = selectedIndices[position1];
        selectedIndices.erase(selectedIndices.begin() + position1);
        size_t position2 = SimulationRNG::uniformInt(0, selectedIndices.size() - 1);
        size_t value2 = selectedIndices[position2];
        selectedIndices.erase(selectedIndices.begin() + position2);

        selectedPairs.push_back({value1, value2});
    }
    assert(selectedIndices.size() == 0);

    return selectedPairs;
}

vector<tuple<size_t, size_t>> PairMosaicSelection::materialise(size_t _poolSize) const
{
    if (_poolSize < 2)
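
To recap the algorithm: materialise() first draws a RandomSubset of the pool, then, if that subset has an odd number of indices, either adds one more previously unselected index or drops a random one (a 50/50 choice, always dropping when the whole pool is already selected), and finally pulls two random entries at a time out of the remaining indices to form the pairs. A hedged usage sketch follows; crossoverAll() and the string "population" are illustrative stand-ins, not code from this commit.

// Hypothetical consumer of PairsFromRandomSubset; only the header include and
// the materialise() call reflect the actual API added in this commit.
#include <tools/yulPhaser/PairSelections.h>

#include <string>
#include <tuple>
#include <vector>

using namespace solidity::phaser;

std::vector<std::string> crossoverAll(std::vector<std::string> const& _population)
{
    std::vector<std::string> offspring;
    // Roughly 25% of the population gets paired up on each call; an individual
    // is never used in more than one pair per materialise() call.
    for (auto const& [i, j]: PairsFromRandomSubset(0.25).materialise(_population.size()))
        offspring.push_back(_population[i] + _population[j]); // stand-in for a real crossover
    return offspring;
}
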
@@ -69,6 +69,28 @@ private:
    double m_selectionSize;
};

/**
 * A selection that goes over all elements in a container, independently decides for each one
 * whether to select it or not, and then randomly combines the selected elements into pairs.
 * If the number of selected elements is odd, it randomly decides whether to take one more or
 * exclude one.
 *
 * Each element has the same chance of being selected and can be selected at most once.
 * The number of selected elements is random and can be different with each call to
 * @a materialise().
 */
class PairsFromRandomSubset: public PairSelection
{
public:
    explicit PairsFromRandomSubset(double _selectionChance):
        m_selectionChance(_selectionChance) {}

    std::vector<std::tuple<size_t, size_t>> materialise(size_t _poolSize) const override;

private:
    double m_selectionChance;
};

/**
 * A selection that selects pairs of elements at specific, fixed positions indicated by a repeating
 * "pattern". If the positions in the pattern exceed the size of the container, they are capped at
@@ -20,6 +20,7 @@
#include <tools/yulPhaser/SimulationRNG.h>

#include <cmath>
#include <numeric>

using namespace std;
using namespace solidity::phaser;
@@ -58,3 +59,12 @@ vector<size_t> RandomSelection::materialise(size_t _poolSize) const
    return selection;
}

vector<size_t> RandomSubset::materialise(size_t _poolSize) const
{
    vector<size_t> selection;
    for (size_t index = 0; index < _poolSize; ++index)
        if (SimulationRNG::bernoulliTrial(m_selectionChance))
            selection.push_back(index);

    return selection;
}
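
RandomSubset::materialise is one independent Bernoulli trial per index, which is why the returned indices are unique, already in ascending order, and number about selectionChance * poolSize on average. For comparison, a self-contained sketch of the same technique built on std::bernoulli_distribution instead of the project's SimulationRNG (illustrative only; yul-phaser routes randomness through SimulationRNG, presumably so runs can be reproduced via SimulationRNG::reset()).

// Standalone equivalent using the standard library; not how yul-phaser
// actually obtains randomness.
#include <cstddef>
#include <random>
#include <vector>

std::vector<std::size_t> randomSubsetIndices(std::size_t _poolSize, double _selectionChance, unsigned _seed)
{
    std::mt19937 generator(_seed);
    std::bernoulli_distribution trial(_selectionChance);

    std::vector<std::size_t> selection;
    for (std::size_t index = 0; index < _poolSize; ++index)
        if (trial(generator))              // independent coin flip for every index
            selection.push_back(index);    // hence unique indices in ascending order
    return selection;
}
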
@@ -118,4 +118,26 @@ private:
    double m_selectionSize;
};

/**
 * A selection that goes over all elements in a container, for each one independently deciding
 * whether to select it or not. Each element has the same chance of being selected and can be
 * selected at most once. The order of selected elements is the same as the order of elements in
 * the container. The number of selected elements is random and can be different with each call
 * to @a materialise().
 */
class RandomSubset: public Selection
{
public:
    explicit RandomSubset(double _selectionChance):
        m_selectionChance(_selectionChance)
    {
        assert(0.0 <= _selectionChance && _selectionChance <= 1.0);
    }

    std::vector<size_t> materialise(size_t _poolSize) const override;

private:
    double m_selectionChance;
};

}
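
The contract documented above (unique indices, returned in container order) is what makes RandomSubset easy to consume: a caller can copy the chosen elements in a single pass. A hedged sketch of such a caller follows; pickSubset() and the string "population" are hypothetical, only RandomSubset and the Selection base class are real yul-phaser types.

// Hypothetical helper that applies any Selection to a concrete container.
#include <tools/yulPhaser/Selections.h>

#include <cstddef>
#include <string>
#include <vector>

using namespace solidity::phaser;

std::vector<std::string> pickSubset(std::vector<std::string> const& _population, Selection const& _selection)
{
    std::vector<std::string> picked;
    for (std::size_t index: _selection.materialise(_population.size()))
        picked.push_back(_population[index]); // valid because every returned index is < population size
    return picked;
}

// Example: keeps each individual with probability 0.3, preserving their order.
// auto survivors = pickSubset(population, RandomSubset(0.3));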