Merge pull request #1666 from CJentzsch/allhexfix

0x -> 0x0 for numbers
Gav Wood 2015-04-19 15:48:05 +02:00
commit 8ef8f2e805
2 changed files with 9 additions and 9 deletions
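The summary "0x -> 0x0 for numbers" is terse, so here is a minimal, self-contained sketch of the behaviour behind the change. The helper bodies below are simplified stand-ins, not the real libdevcore implementations: the point is that with no minimum byte count, a zero value encodes to an empty byte string, so the hex field degenerates to a bare "0x"; passing a minimum of 1 keeps at least one encoded byte, e.g. "0x00".

// Sketch only: simplified stand-ins for the dev::toCompactBigEndian / dev::toHex
// helpers, assuming plain 64-bit values instead of u256.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Compact big-endian encoding: emit no leading zero bytes, but pad the result
// back up to _min bytes so a zero value still occupies at least _min bytes.
std::vector<uint8_t> toCompactBigEndian(uint64_t _v, unsigned _min = 0)
{
    std::vector<uint8_t> ret;
    for (; _v; _v >>= 8)
        ret.insert(ret.begin(), uint8_t(_v & 0xff));
    while (ret.size() < _min)
        ret.insert(ret.begin(), 0);
    return ret;
}

// Two lowercase hex digits per byte.
std::string toHex(std::vector<uint8_t> const& _data)
{
    static char const* digits = "0123456789abcdef";
    std::string ret;
    for (uint8_t b: _data)
    {
        ret += digits[b >> 4];
        ret += digits[b & 0x0f];
    }
    return ret;
}

int main()
{
    std::cout << "0x" + toHex(toCompactBigEndian(0)) << "\n";           // "0x"   (before the fix)
    std::cout << "0x" + toHex(toCompactBigEndian(0, 1)) << "\n";        // "0x00" (after the fix)
    std::cout << "0x" + toHex(toCompactBigEndian(0x2fefd8, 1)) << "\n"; // "0x2fefd8" (non-zero unchanged)
}

Non-zero values already encode to at least one byte, so only zero-valued fields (a zero nonce, balance, gasUsed, and so on) change shape; whether the serialized zero ends up as "0x0" or "0x00" depends on the real toHex, which this sketch does not reproduce exactly.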


@@ -123,7 +123,7 @@ json_spirit::mObject& ImportTest::makeAllFieldsHex(json_spirit::mObject& _o)
 {
     static const set<string> hashes {"bloom" , "coinbase", "hash", "mixHash", "parentHash", "receiptTrie",
         "stateRoot", "transactionsTrie", "uncleHash", "currentCoinbase",
-        "previousHash", "to", "address", "caller", "origin", "secretKey"};
+        "previousHash", "to", "address", "caller", "origin", "secretKey", "data"};
     for (auto& i: _o)
     {
@@ -140,7 +140,7 @@ json_spirit::mObject& ImportTest::makeAllFieldsHex(json_spirit::mObject& _o)
             str = value.get_str();
         else continue;
-        _o[key] = (str.substr(0, 2) == "0x") ? str : "0x" + toHex(toCompactBigEndian(toInt(str)));
+        _o[key] = (str.substr(0, 2) == "0x") ? str : "0x" + toHex(toCompactBigEndian(toInt(str), 1));
     }
     return _o;
 }
@@ -363,8 +363,8 @@ json_spirit::mObject fillJsonWithState(State _state)
     for (auto const& a: _state.addresses())
     {
         json_spirit::mObject o;
-        o["balance"] = "0x" + toHex(toCompactBigEndian(_state.balance(a.first)));
-        o["nonce"] = "0x" + toHex(toCompactBigEndian(_state.transactionsFrom(a.first)));
+        o["balance"] = "0x" + toHex(toCompactBigEndian(_state.balance(a.first), 1));
+        o["nonce"] = "0x" + toHex(toCompactBigEndian(_state.transactionsFrom(a.first), 1));
         {
             json_spirit::mObject store;
             for (auto const& s: _state.storage(a.first))


@@ -625,11 +625,11 @@ void writeBlockHeaderToJson(mObject& _o, BlockInfo const& _bi)
     _o["transactionsTrie"] = toString(_bi.transactionsRoot);
     _o["receiptTrie"] = toString(_bi.receiptsRoot);
     _o["bloom"] = toString(_bi.logBloom);
-    _o["difficulty"] = "0x" + toHex(toCompactBigEndian(_bi.difficulty));
-    _o["number"] = "0x" + toHex(toCompactBigEndian(_bi.number));
-    _o["gasLimit"] = "0x" + toHex(toCompactBigEndian(_bi.gasLimit));
-    _o["gasUsed"] = "0x" + toHex(toCompactBigEndian(_bi.gasUsed));
-    _o["timestamp"] = "0x" + toHex(toCompactBigEndian(_bi.timestamp));
+    _o["difficulty"] = "0x" + toHex(toCompactBigEndian(_bi.difficulty), 1);
+    _o["number"] = "0x" + toHex(toCompactBigEndian(_bi.number), 1);
+    _o["gasLimit"] = "0x" + toHex(toCompactBigEndian(_bi.gasLimit), 1);
+    _o["gasUsed"] = "0x" + toHex(toCompactBigEndian(_bi.gasUsed), 1);
+    _o["timestamp"] = "0x" + toHex(toCompactBigEndian(_bi.timestamp), 1);
     _o["extraData"] ="0x" + toHex(_bi.extraData);
     _o["mixHash"] = toString(_bi.mixHash);
     _o["nonce"] = toString(_bi.nonce);