trie: reduce allocs in recHash (#27770)
commit 5976e58415
parent 7dea9c10cd
@@ -51,9 +51,8 @@ func hexToCompact(hex []byte) []byte {
 	return buf
 }
 
-// hexToCompactInPlace places the compact key in input buffer, returning the length
-// needed for the representation
-func hexToCompactInPlace(hex []byte) int {
+// hexToCompactInPlace places the compact key in input buffer, returning the compacted key.
+func hexToCompactInPlace(hex []byte) []byte {
 	var (
 		hexLen    = len(hex) // length of the hex input
 		firstByte = byte(0)
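
For context, hexToCompact and hexToCompactInPlace implement the hex-prefix ("compact") key encoding used throughout the trie package (presumably trie/encoding.go in the usual go-ethereum layout). Below is my own minimal, allocating sketch of that encoding with a few hand-checked examples; it is intended to mirror what these helpers produce, but it is not the repository code.

package main

import "fmt"

// hpEncode is an illustrative re-implementation of hex-prefix ("compact")
// encoding: nibbles in, bytes out, with a flag nibble carrying the
// odd-length and terminator bits.
func hpEncode(nibbles []byte) []byte {
	term := 0
	if len(nibbles) > 0 && nibbles[len(nibbles)-1] == 16 {
		term = 1
		nibbles = nibbles[:len(nibbles)-1] // drop the terminator nibble
	}
	odd := len(nibbles) % 2
	flags := byte(2*term + odd) // bit 0: odd length, bit 1: terminator present
	out := make([]byte, 0, len(nibbles)/2+1)
	if odd == 1 {
		out = append(out, flags<<4|nibbles[0]) // first nibble shares the flag byte
		nibbles = nibbles[1:]
	} else {
		out = append(out, flags<<4)
	}
	for i := 0; i < len(nibbles); i += 2 {
		out = append(out, nibbles[i]<<4|nibbles[i+1])
	}
	return out
}

func main() {
	fmt.Printf("%x\n", hpEncode([]byte{1, 2, 3, 4, 16})) // 201234 (even length, terminated)
	fmt.Printf("%x\n", hpEncode([]byte{1, 2, 3, 16}))    // 3123   (odd length, terminated)
	fmt.Printf("%x\n", hpEncode([]byte{1, 2, 3, 4}))     // 001234 (even length, no terminator)
}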
@@ -77,7 +76,7 @@ func hexToCompactInPlace(hex []byte) int {
 		hex[bi] = hex[ni]<<4 | hex[ni+1]
 	}
 	hex[0] = firstByte
-	return binLen
+	return hex[:binLen]
 }
 
 func compactToHex(compact []byte) []byte {
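
The functional change is the return statement: instead of handing back a length for the caller to re-slice with, the function now returns hex[:binLen] directly. A minimal sketch of what that implies, assuming only that the result is a re-slice of the input (illustrative names and length, not repository code):

package main

import "fmt"

// reslice stands in for "return hex[:binLen]": the returned slice shares its
// backing array with the input, so no new buffer is allocated and the caller
// must treat the hex form as consumed.
func reslice(hex []byte) []byte {
	binLen := len(hex)/2 + 1 // illustrative length only
	return hex[:binLen]
}

func main() {
	key := []byte{1, 2, 3, 4, 16}
	compact := reslice(key)
	fmt.Println(len(compact), cap(compact) == cap(key)) // 3 true: same backing array
	compact[0] = 0x20
	fmt.Println(key[0]) // 32: writes through compact are visible in key
}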
@@ -86,8 +86,7 @@ func TestHexToCompactInPlace(t *testing.T) {
 	} {
 		hexBytes, _ := hex.DecodeString(key)
 		exp := hexToCompact(hexBytes)
-		sz := hexToCompactInPlace(hexBytes)
-		got := hexBytes[:sz]
+		got := hexToCompactInPlace(hexBytes)
 		if !bytes.Equal(exp, got) {
 			t.Fatalf("test %d: encoding err\ninp %v\ngot %x\nexp %x\n", i, key, got, exp)
 		}
@@ -102,8 +101,7 @@ func TestHexToCompactInPlaceRandom(t *testing.T) {
 		hexBytes := keybytesToHex(key)
 		hexOrig := []byte(string(hexBytes))
 		exp := hexToCompact(hexBytes)
-		sz := hexToCompactInPlace(hexBytes)
-		got := hexBytes[:sz]
+		got := hexToCompactInPlace(hexBytes)
 
 		if !bytes.Equal(exp, got) {
 			t.Fatalf("encoding err \ncpt %x\nhex %x\ngot %x\nexp %x\n",
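
Both tests now compare the returned slice directly against hexToCompact instead of re-slicing with a returned length. Assuming the usual go-ethereum layout (trie/encoding_test.go), they can be re-run from the repository root with something like:

	go test ./trie -run TestHexToCompactInPlace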
@@ -119,6 +117,13 @@ func BenchmarkHexToCompact(b *testing.B) {
 	}
 }
 
+func BenchmarkHexToCompactInPlace(b *testing.B) {
+	testBytes := []byte{0, 15, 1, 12, 11, 8, 16 /*term*/}
+	for i := 0; i < b.N; i++ {
+		hexToCompactInPlace(testBytes)
+	}
+}
+
 func BenchmarkCompactToHex(b *testing.B) {
 	testBytes := []byte{0, 15, 1, 12, 11, 8, 16 /*term*/}
 	for i := 0; i < b.N; i++ {
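
The new benchmark mirrors BenchmarkCompactToHex; the interesting number is the allocation count, which go test reports with -benchmem. As a standalone sketch of why a re-slicing variant shows zero allocations per call while an allocating one shows one, here is an illustration using testing.AllocsPerRun (the helpers allocating/inPlace and the figures in the comments are illustrative, not measurements of the real functions):

package main

import (
	"fmt"
	"testing"
)

var sink []byte // package-level sink so the compiler cannot elide the work

// allocating returns a freshly made buffer on every call.
func allocating(hex []byte) []byte {
	return make([]byte, len(hex)/2+1)
}

// inPlace re-slices the caller's buffer instead of allocating.
func inPlace(hex []byte) []byte {
	return hex[:len(hex)/2+1]
}

func main() {
	key := []byte{0, 15, 1, 12, 11, 8, 16}
	fmt.Println(testing.AllocsPerRun(100, func() { sink = allocating(key) })) // typically 1
	fmt.Println(testing.AllocsPerRun(100, func() { sink = inPlace(key) }))    // 0
}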
@@ -444,7 +444,7 @@ func (st *StackTrie) hashRec(hasher *hasher, path []byte) {
 	case extNode:
 		st.children[0].hashRec(hasher, append(path, st.key...))
 
-		n := shortNode{Key: hexToCompact(st.key)}
+		n := shortNode{Key: hexToCompactInPlace(st.key)}
 		if len(st.children[0].val) < 32 {
 			n.Val = rawNode(st.children[0].val)
 		} else {
@@ -460,7 +460,7 @@ func (st *StackTrie) hashRec(hasher *hasher, path []byte) {
 
 	case leafNode:
 		st.key = append(st.key, byte(16))
-		n := shortNode{Key: hexToCompact(st.key), Val: valueNode(st.val)}
+		n := shortNode{Key: hexToCompactInPlace(st.key), Val: valueNode(st.val)}
 
 		n.encode(hasher.encbuf)
 		encodedNode = hasher.encodedBytes()
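
Switching these two hashRec call sites from hexToCompact to hexToCompactInPlace is what removes the per-node key allocation, and it relies on the hex form of st.key no longer being needed once the shortNode is built; note that in the extNode case the child recursion has already copied st.key into the path via append before the conversion. A small self-contained sketch of why that ordering matters when the compactor clobbers its input (stand-in function, hypothetical names):

package main

import "fmt"

// destructiveCompact stands in for an in-place compactor: it overwrites the
// front of its input and returns a re-slice of it.
func destructiveCompact(hex []byte) []byte {
	n := len(hex)/2 + 1
	for i := 0; i < n; i++ {
		hex[i] = 0xff // clobber: the hex nibbles are gone after this
	}
	return hex[:n]
}

func main() {
	stKey := []byte{1, 2, 3, 16}
	path := []byte{7}

	// Safe ordering (as in the extNode case): consume the hex key first...
	path = append(path, stKey...) // copies the nibbles out of stKey
	compact := destructiveCompact(stKey)

	fmt.Printf("path %v compact %x\n", path, compact)
	// Appending after destructiveCompact would have copied clobbered bytes
	// instead of the original nibbles.
}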