/*
	This file is part of solidity.

	solidity is free software: you can redistribute it and/or modify
	it under the terms of the GNU General Public License as published by
	the Free Software Foundation, either version 3 of the License, or
	(at your option) any later version.

	solidity is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
	GNU General Public License for more details.

	You should have received a copy of the GNU General Public License
	along with solidity. If not, see <http://www.gnu.org/licenses/>.
*/
// SPDX-License-Identifier: GPL-3.0
/** @file SwarmHash.cpp
 */
|
|
|
|
|
2020-01-06 10:52:23 +00:00
|
|
|
#include <libsolutil/SwarmHash.h>
|
2016-11-14 22:12:14 +00:00
|
|
|
|
2020-01-06 10:52:23 +00:00
|
|
|
#include <libsolutil/Keccak256.h>
|
2016-11-14 22:12:14 +00:00
|
|
|
|
2019-12-11 16:31:36 +00:00
|
|
|
using namespace solidity;
|
|
|
|
using namespace solidity::util;
|
2016-11-14 22:12:14 +00:00
|
|
|
|
2017-08-29 00:01:47 +00:00
|
|
|
namespace
|
|
|
|
{
|
2016-11-14 22:12:14 +00:00
|
|
|
|
2021-01-13 14:46:52 +00:00
|
|
|
/// Encodes @a _size as a fixed 8-byte little-endian byte sequence
/// (least significant byte first).
bytes toLittleEndian(uint64_t _size)
{
	bytes result;
	result.reserve(8);
	for (size_t byteIndex = 0; byteIndex < 8; ++byteIndex)
	{
		// Emit the current low byte, then shift the next one into place.
		result.push_back(static_cast<uint8_t>(_size & 0xff));
		_size >>= 8;
	}
	return result;
}
|
|
|
|
|
|
|
|
/// Hashes a single node: keccak256 of the 8-byte little-endian encoding
/// of @a _size followed by the node payload @a _data.
h256 swarmHashSimple(bytesConstRef _data, size_t _size)
{
	bytes lengthPrefixed = toLittleEndian(_size);
	lengthPrefixed += _data.toBytes();
	return keccak256(lengthPrefixed);
}
|
|
|
|
|
2023-08-17 13:53:20 +00:00
|
|
|
/// Recursively computes the bzzr0 ("swarm") hash of the slice
/// [_offset, _offset + _length) of @a _input.
/// Slices of up to 0x1000 bytes are hashed directly; larger slices are
/// split into at most 128 (= 0x1000 / 32) sub-trees whose 32-byte root
/// hashes are concatenated and hashed as an intermediate node.
h256 swarmHashIntermediate(std::string const& _input, size_t _offset, size_t _length)
{
	bytesConstRef ref;
	// Concatenation of the child sub-trees' root hashes. Only filled in the
	// recursive branch; must outlive `ref`, which then points into it.
	bytes innerNodes;
	if (_length <= 0x1000)
		// Base case: the slice fits into one chunk and is hashed as-is.
		ref = bytesConstRef(_input).cropped(_offset, _length);
	else
	{
		// Grow the per-child span by factors of 128 until at most 128
		// children cover the whole slice.
		size_t maxRepresentedSize = 0x1000;
		while (maxRepresentedSize * (0x1000 / 32) < _length)
			maxRepresentedSize *= (0x1000 / 32);
		for (size_t i = 0; i < _length; i += maxRepresentedSize)
		{
			// The last child may represent fewer than maxRepresentedSize bytes.
			size_t size = std::min(maxRepresentedSize, _length - i);
			innerNodes += swarmHashIntermediate(_input, _offset + i, size).asBytes();
		}
		ref = bytesConstRef(&innerNodes);
	}
	// The final hash commits to the total represented length, not merely
	// to the number of bytes in `ref`.
	return swarmHashSimple(ref, _length);
}
|
|
|
|
|
2019-05-07 08:25:22 +00:00
|
|
|
/// Computes the binary Merkle tree (BMT) hash of @a _data by recursively
/// halving it; segments of at most 64 bytes are keccak256-hashed directly.
h256 bmtHash(bytesConstRef _data)
{
	// Leaf case: two 32-byte words or fewer.
	if (_data.size() <= 64)
		return keccak256(_data);

	size_t half = _data.size() / 2;
	// Hash the two halves and combine their roots.
	bytes combined = bmtHash(_data.cropped(0, half)).asBytes();
	combined += bmtHash(_data.cropped(half)).asBytes();
	return keccak256(combined);
}
|
|
|
|
|
|
|
|
/// Computes the bzzr1 chunk hash of @a _data.
/// A chunk of at most 0x1000 bytes is hashed directly; larger data is split
/// into at most 128 (= 0x1000 / 32) sub-chunks whose hashes form the data of
/// an intermediate chunk. The chunk content is zero-padded to 0x1000 bytes,
/// BMT-hashed, and prefixed with the little-endian encoding of the
/// represented size.
/// @param _forceHigherLevel if true, a chunk of exactly 0x1000 bytes is still
/// treated as an intermediate node (needed when siblings at the same tree
/// level are intermediate nodes).
h256 chunkHash(bytesConstRef const _data, bool _forceHigherLevel = false)
{
	bytes dataToHash;
	if (_data.size() < 0x1000)
		// Small chunk: hash the raw data.
		dataToHash = _data.toBytes();
	else if (_data.size() == 0x1000 && !_forceHigherLevel)
		// Exactly one full chunk: also hashed as raw data, unless the caller
		// requires an intermediate node at this level.
		dataToHash = _data.toBytes();
	else
	{
		// Grow the per-child span by factors of 128 until at most 128
		// children cover the data.
		size_t maxRepresentedSize = 0x1000;
		while (maxRepresentedSize * (0x1000 / 32) < _data.size())
			maxRepresentedSize *= (0x1000 / 32);
		// If remaining size is 0x1000, but maxRepresentedSize is not,
		// we have to still do one level of the chunk hashes.
		bool forceHigher = maxRepresentedSize > 0x1000;
		for (size_t i = 0; i < _data.size(); i += maxRepresentedSize)
		{
			// The last child may represent fewer than maxRepresentedSize bytes.
			size_t size = std::min(maxRepresentedSize, _data.size() - i);
			dataToHash += chunkHash(_data.cropped(i, size), forceHigher).asBytes();
		}
	}

	// Chunks are always padded with zeros up to the full chunk size before
	// BMT hashing; the size prefix records the actual represented size.
	dataToHash.resize(0x1000, 0);
	return keccak256(toLittleEndian(_data.size()) + bmtHash(&dataToHash).asBytes());
}
|
|
|
|
|
|
|
|
|
2017-08-29 00:01:47 +00:00
|
|
|
}
|
|
|
|
|
2023-08-17 13:53:20 +00:00
|
|
|
/// Computes the bzzr0 swarm hash of the whole of @a _input.
h256 solidity::util::bzzr0Hash(std::string const& _input)
{
	// The hash tree always covers the entire input string.
	size_t totalLength = _input.size();
	return swarmHashIntermediate(_input, 0, totalLength);
}
|
2019-05-07 08:25:22 +00:00
|
|
|
|
|
|
|
|
2019-12-11 16:31:36 +00:00
|
|
|
/// Computes the bzzr1 swarm hash of @a _input.
/// Empty input maps directly to the all-zero hash instead of going through
/// the chunk machinery.
h256 solidity::util::bzzr1Hash(bytes const& _input)
{
	return _input.empty() ? h256{} : chunkHash(&_input);
}
|