Split common crates out into their own repos (#3890)

## Proposed Changes

Split out several crates which now exist in separate repos under `sigp`.

- [`ssz` and `ssz_derive`](https://github.com/sigp/ethereum_ssz)
- [`tree_hash` and `tree_hash_derive`](https://github.com/sigp/tree_hash)
- [`ethereum_hashing`](https://github.com/sigp/ethereum_hashing)
- [`ethereum_serde_utils`](https://github.com/sigp/ethereum_serde_utils)
- [`ssz_types`](https://github.com/sigp/ssz_types)

For the published crates see: https://crates.io/teams/github:sigp:crates-io?sort=recent-updates.

## Additional Info

- [x] Need to work out how to handle versioning. I was hoping to do 1.0 versions of several crates, but if they depend on `ethereum-types 0.x` that is not going to work. EDIT: decided to go with 0.5.x versions.
- [x] Need to port several changes from `tree-states`, `capella`, `eip4844` branches to the external repos.
This commit is contained in:
Michael Sproul 2023-04-28 01:15:40 +00:00
parent 7456e1e8fa
commit c11638c36c
162 changed files with 469 additions and 10669 deletions

299
Cargo.lock generated
View File

@ -590,10 +590,9 @@ dependencies = [
"environment", "environment",
"eth1", "eth1",
"eth2", "eth2",
"eth2_hashing", "ethereum_hashing",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"eth2_ssz_types",
"execution_layer", "execution_layer",
"exit-future", "exit-future",
"fork_choice", "fork_choice",
@ -624,6 +623,7 @@ dependencies = [
"sloggers", "sloggers",
"slot_clock", "slot_clock",
"smallvec", "smallvec",
"ssz_types",
"state_processing", "state_processing",
"store", "store",
"strum", "strum",
@ -779,10 +779,10 @@ version = "0.2.0"
dependencies = [ dependencies = [
"arbitrary", "arbitrary",
"blst", "blst",
"eth2_hashing",
"eth2_serde_utils",
"eth2_ssz",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_hashing",
"ethereum_serde_utils",
"ethereum_ssz",
"hex", "hex",
"milagro_bls", "milagro_bls",
"rand 0.7.3", "rand 0.7.3",
@ -824,7 +824,7 @@ dependencies = [
"clap", "clap",
"clap_utils", "clap_utils",
"eth2_network_config", "eth2_network_config",
"eth2_ssz", "ethereum_ssz",
"hex", "hex",
"lighthouse_network", "lighthouse_network",
"log", "log",
@ -922,14 +922,14 @@ dependencies = [
name = "cached_tree_hash" name = "cached_tree_hash"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"eth2_hashing",
"eth2_ssz",
"eth2_ssz_derive",
"eth2_ssz_types",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_hashing",
"ethereum_ssz",
"ethereum_ssz_derive",
"quickcheck", "quickcheck",
"quickcheck_macros", "quickcheck_macros",
"smallvec", "smallvec",
"ssz_types",
"tree_hash", "tree_hash",
] ]
@ -1073,8 +1073,8 @@ dependencies = [
"clap", "clap",
"dirs", "dirs",
"eth2_network_config", "eth2_network_config",
"eth2_ssz",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_ssz",
"hex", "hex",
"serde", "serde",
"serde_json", "serde_json",
@ -1166,16 +1166,6 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "console_error_panic_hook"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
dependencies = [
"cfg-if",
"wasm-bindgen",
]
[[package]] [[package]]
name = "const-oid" name = "const-oid"
version = "0.9.2" version = "0.9.2"
@ -1648,8 +1638,8 @@ dependencies = [
name = "deposit_contract" name = "deposit_contract"
version = "0.2.0" version = "0.2.0"
dependencies = [ dependencies = [
"eth2_ssz",
"ethabi 16.0.0", "ethabi 16.0.0",
"ethereum_ssz",
"hex", "hex",
"reqwest", "reqwest",
"serde_json", "serde_json",
@ -1965,9 +1955,9 @@ dependencies = [
"compare_fields", "compare_fields",
"compare_fields_derive", "compare_fields_derive",
"derivative", "derivative",
"eth2_ssz",
"eth2_ssz_derive",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_ssz",
"ethereum_ssz_derive",
"execution_layer", "execution_layer",
"fork_choice", "fork_choice",
"fs2", "fs2",
@ -2157,8 +2147,8 @@ dependencies = [
"environment", "environment",
"eth1_test_rig", "eth1_test_rig",
"eth2", "eth2",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"execution_layer", "execution_layer",
"futures", "futures",
"hex", "hex",
@ -2201,9 +2191,9 @@ dependencies = [
"account_utils", "account_utils",
"bytes", "bytes",
"eth2_keystore", "eth2_keystore",
"eth2_serde_utils", "ethereum_serde_utils",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"futures", "futures",
"futures-util", "futures-util",
"libsecp256k1", "libsecp256k1",
@ -2230,25 +2220,13 @@ dependencies = [
"types", "types",
] ]
[[package]]
name = "eth2_hashing"
version = "0.3.0"
dependencies = [
"cpufeatures",
"lazy_static",
"ring",
"rustc-hex",
"sha2 0.10.6",
"wasm-bindgen-test",
]
[[package]] [[package]]
name = "eth2_interop_keypairs" name = "eth2_interop_keypairs"
version = "0.2.0" version = "0.2.0"
dependencies = [ dependencies = [
"base64 0.13.1", "base64 0.13.1",
"bls", "bls",
"eth2_hashing", "ethereum_hashing",
"hex", "hex",
"lazy_static", "lazy_static",
"num-bigint", "num-bigint",
@ -2297,62 +2275,13 @@ version = "0.2.0"
dependencies = [ dependencies = [
"discv5", "discv5",
"eth2_config", "eth2_config",
"eth2_ssz", "ethereum_ssz",
"serde_yaml", "serde_yaml",
"tempfile", "tempfile",
"types", "types",
"zip", "zip",
] ]
[[package]]
name = "eth2_serde_utils"
version = "0.1.1"
dependencies = [
"ethereum-types 0.14.1",
"hex",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "eth2_ssz"
version = "0.4.1"
dependencies = [
"eth2_ssz_derive",
"ethereum-types 0.14.1",
"itertools",
"smallvec",
]
[[package]]
name = "eth2_ssz_derive"
version = "0.3.1"
dependencies = [
"darling 0.13.4",
"eth2_ssz",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "eth2_ssz_types"
version = "0.2.2"
dependencies = [
"arbitrary",
"derivative",
"eth2_serde_utils",
"eth2_ssz",
"serde",
"serde_derive",
"serde_json",
"smallvec",
"tree_hash",
"tree_hash_derive",
"typenum",
]
[[package]] [[package]]
name = "eth2_wallet" name = "eth2_wallet"
version = "0.1.0" version = "0.1.0"
@ -2491,6 +2420,54 @@ dependencies = [
"uint", "uint",
] ]
[[package]]
name = "ethereum_hashing"
version = "1.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "233dc6f434ce680dbabf4451ee3380cec46cb3c45d66660445a435619710dd35"
dependencies = [
"cpufeatures",
"lazy_static",
"ring",
"sha2 0.10.6",
]
[[package]]
name = "ethereum_serde_utils"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f8cb04ea380a33e9c269fa5f8df6f2d63dee19728235f3e639e7674e038686a"
dependencies = [
"ethereum-types 0.14.1",
"hex",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "ethereum_ssz"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32749e96305376af40d7a7ee8ea4c4c64c68d09ff94a81ab78c8d9bc7153c221"
dependencies = [
"ethereum-types 0.14.1",
"itertools",
"smallvec",
]
[[package]]
name = "ethereum_ssz_derive"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9cac7ef2107926cea34c0064056f9bb134d2085eef882388d151d2e59174cf0"
dependencies = [
"darling 0.13.4",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]] [[package]]
name = "ethers-core" name = "ethers-core"
version = "1.0.2" version = "1.0.2"
@ -2593,10 +2570,9 @@ dependencies = [
"bytes", "bytes",
"environment", "environment",
"eth2", "eth2",
"eth2_serde_utils",
"eth2_ssz",
"eth2_ssz_types",
"ethereum-consensus", "ethereum-consensus",
"ethereum_serde_utils",
"ethereum_ssz",
"ethers-core", "ethers-core",
"exit-future", "exit-future",
"fork_choice", "fork_choice",
@ -2619,6 +2595,7 @@ dependencies = [
"slog", "slog",
"slot_clock", "slot_clock",
"ssz-rs", "ssz-rs",
"ssz_types",
"state_processing", "state_processing",
"strum", "strum",
"superstruct 0.6.0", "superstruct 0.6.0",
@ -2772,8 +2749,8 @@ name = "fork_choice"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"beacon_chain", "beacon_chain",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"proto_array", "proto_array",
"slog", "slog",
"state_processing", "state_processing",
@ -2961,8 +2938,8 @@ dependencies = [
"environment", "environment",
"eth1", "eth1",
"eth1_test_rig", "eth1_test_rig",
"eth2_hashing", "ethereum_hashing",
"eth2_ssz", "ethereum_ssz",
"futures", "futures",
"int_to_bytes", "int_to_bytes",
"merkle_proof", "merkle_proof",
@ -3316,8 +3293,8 @@ dependencies = [
"environment", "environment",
"eth1", "eth1",
"eth2", "eth2",
"eth2_serde_utils", "ethereum_serde_utils",
"eth2_ssz", "ethereum_ssz",
"execution_layer", "execution_layer",
"futures", "futures",
"genesis", "genesis",
@ -3855,8 +3832,8 @@ dependencies = [
"eth1_test_rig", "eth1_test_rig",
"eth2", "eth2",
"eth2_network_config", "eth2_network_config",
"eth2_ssz",
"eth2_wallet", "eth2_wallet",
"ethereum_ssz",
"genesis", "genesis",
"int_to_bytes", "int_to_bytes",
"lighthouse_network", "lighthouse_network",
@ -4506,8 +4483,8 @@ dependencies = [
"env_logger 0.9.3", "env_logger 0.9.3",
"environment", "environment",
"eth1", "eth1",
"eth2_hashing",
"eth2_network_config", "eth2_network_config",
"ethereum_hashing",
"futures", "futures",
"lazy_static", "lazy_static",
"lighthouse_metrics", "lighthouse_metrics",
@ -4547,9 +4524,8 @@ dependencies = [
"dirs", "dirs",
"discv5", "discv5",
"error-chain", "error-chain",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"eth2_ssz_types",
"exit-future", "exit-future",
"fnv", "fnv",
"futures", "futures",
@ -4574,6 +4550,7 @@ dependencies = [
"slog-term", "slog-term",
"smallvec", "smallvec",
"snap", "snap",
"ssz_types",
"strum", "strum",
"superstruct 0.5.0", "superstruct 0.5.0",
"task_executor", "task_executor",
@ -4815,8 +4792,8 @@ dependencies = [
name = "merkle_proof" name = "merkle_proof"
version = "0.2.0" version = "0.2.0"
dependencies = [ dependencies = [
"eth2_hashing",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_hashing",
"lazy_static", "lazy_static",
"quickcheck", "quickcheck",
"quickcheck_macros", "quickcheck_macros",
@ -5216,9 +5193,8 @@ dependencies = [
"derivative", "derivative",
"environment", "environment",
"error-chain", "error-chain",
"eth2_ssz",
"eth2_ssz_types",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_ssz",
"execution_layer", "execution_layer",
"exit-future", "exit-future",
"fnv", "fnv",
@ -5244,6 +5220,7 @@ dependencies = [
"sloggers", "sloggers",
"slot_clock", "slot_clock",
"smallvec", "smallvec",
"ssz_types",
"store", "store",
"strum", "strum",
"task_executor", "task_executor",
@ -5563,8 +5540,8 @@ dependencies = [
"beacon_chain", "beacon_chain",
"bitvec 1.0.1", "bitvec 1.0.1",
"derivative", "derivative",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"itertools", "itertools",
"lazy_static", "lazy_static",
"lighthouse_metrics", "lighthouse_metrics",
@ -6212,8 +6189,8 @@ dependencies = [
name = "proto_array" name = "proto_array"
version = "0.2.0" version = "0.2.0"
dependencies = [ dependencies = [
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"safe_arith", "safe_arith",
"serde", "serde",
"serde_derive", "serde_derive",
@ -7320,8 +7297,8 @@ version = "0.1.0"
dependencies = [ dependencies = [
"bincode", "bincode",
"byteorder", "byteorder",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"filesystem", "filesystem",
"flate2", "flate2",
"lazy_static", "lazy_static",
@ -7369,7 +7346,7 @@ name = "slashing_protection"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"arbitrary", "arbitrary",
"eth2_serde_utils", "ethereum_serde_utils",
"filesystem", "filesystem",
"lazy_static", "lazy_static",
"r2d2", "r2d2",
@ -7597,6 +7574,24 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "ssz_types"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8052a1004e979c0be24b9e55940195553103cc57d0b34f7e2c4e32793325e402"
dependencies = [
"arbitrary",
"derivative",
"ethereum_serde_utils",
"ethereum_ssz",
"itertools",
"serde",
"serde_derive",
"smallvec",
"tree_hash",
"typenum",
]
[[package]] [[package]]
name = "state_processing" name = "state_processing"
version = "0.2.0" version = "0.2.0"
@ -7606,10 +7601,9 @@ dependencies = [
"bls", "bls",
"derivative", "derivative",
"env_logger 0.9.3", "env_logger 0.9.3",
"eth2_hashing", "ethereum_hashing",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"eth2_ssz_types",
"int_to_bytes", "int_to_bytes",
"integer-sqrt", "integer-sqrt",
"itertools", "itertools",
@ -7619,6 +7613,7 @@ dependencies = [
"rayon", "rayon",
"safe_arith", "safe_arith",
"smallvec", "smallvec",
"ssz_types",
"tokio", "tokio",
"tree_hash", "tree_hash",
"types", "types",
@ -7629,7 +7624,7 @@ name = "state_transition_vectors"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"beacon_chain", "beacon_chain",
"eth2_ssz", "ethereum_ssz",
"lazy_static", "lazy_static",
"state_processing", "state_processing",
"tokio", "tokio",
@ -7649,8 +7644,8 @@ dependencies = [
"beacon_chain", "beacon_chain",
"db-key", "db-key",
"directory", "directory",
"eth2_ssz", "ethereum_ssz",
"eth2_ssz_derive", "ethereum_ssz_derive",
"itertools", "itertools",
"lazy_static", "lazy_static",
"leveldb", "leveldb",
@ -7778,8 +7773,8 @@ name = "swap_or_not_shuffle"
version = "0.2.0" version = "0.2.0"
dependencies = [ dependencies = [
"criterion", "criterion",
"eth2_hashing",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_hashing",
] ]
[[package]] [[package]]
@ -8446,22 +8441,20 @@ dependencies = [
[[package]] [[package]]
name = "tree_hash" name = "tree_hash"
version = "0.4.1" version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca8488e272d45adc36db8f6c99d09613f58a7cd06c7b347546c87d9a29ca11e8"
dependencies = [ dependencies = [
"beacon_chain",
"eth2_hashing",
"eth2_ssz",
"eth2_ssz_derive",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"rand 0.8.5", "ethereum_hashing",
"smallvec", "smallvec",
"tree_hash_derive",
"types",
] ]
[[package]] [[package]]
name = "tree_hash_derive" name = "tree_hash_derive"
version = "0.4.0" version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83baa26594d96889e5fef7638dfb0f41e16070301a5cf6da99b9a6a0804cec89"
dependencies = [ dependencies = [
"darling 0.13.4", "darling 0.13.4",
"quote", "quote",
@ -8616,13 +8609,12 @@ dependencies = [
"compare_fields_derive", "compare_fields_derive",
"criterion", "criterion",
"derivative", "derivative",
"eth2_hashing",
"eth2_interop_keypairs", "eth2_interop_keypairs",
"eth2_serde_utils",
"eth2_ssz",
"eth2_ssz_derive",
"eth2_ssz_types",
"ethereum-types 0.14.1", "ethereum-types 0.14.1",
"ethereum_hashing",
"ethereum_serde_utils",
"ethereum_ssz",
"ethereum_ssz_derive",
"hex", "hex",
"int_to_bytes", "int_to_bytes",
"itertools", "itertools",
@ -8645,6 +8637,7 @@ dependencies = [
"serde_yaml", "serde_yaml",
"slog", "slog",
"smallvec", "smallvec",
"ssz_types",
"state_processing", "state_processing",
"superstruct 0.6.0", "superstruct 0.6.0",
"swap_or_not_shuffle", "swap_or_not_shuffle",
@ -8822,7 +8815,7 @@ dependencies = [
"environment", "environment",
"eth2", "eth2",
"eth2_keystore", "eth2_keystore",
"eth2_serde_utils", "ethereum_serde_utils",
"exit-future", "exit-future",
"filesystem", "filesystem",
"futures", "futures",
@ -9076,30 +9069,6 @@ version = "0.2.84"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
[[package]]
name = "wasm-bindgen-test"
version = "0.3.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b"
dependencies = [
"console_error_panic_hook",
"js-sys",
"scoped-tls",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-bindgen-test-macro",
]
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9"
dependencies = [
"proc-macro2",
"quote",
]
[[package]] [[package]]
name = "wasm-streams" name = "wasm-streams"
version = "0.2.3" version = "0.2.3"

View File

@ -53,17 +53,10 @@ members = [
"consensus/fork_choice", "consensus/fork_choice",
"consensus/proto_array", "consensus/proto_array",
"consensus/safe_arith", "consensus/safe_arith",
"consensus/ssz",
"consensus/ssz_derive",
"consensus/ssz_types",
"consensus/serde_utils",
"consensus/state_processing", "consensus/state_processing",
"consensus/swap_or_not_shuffle", "consensus/swap_or_not_shuffle",
"consensus/tree_hash",
"consensus/tree_hash_derive",
"crypto/bls", "crypto/bls",
"crypto/eth2_hashing",
"crypto/eth2_key_derivation", "crypto/eth2_key_derivation",
"crypto/eth2_keystore", "crypto/eth2_keystore",
"crypto/eth2_wallet", "crypto/eth2_wallet",
@ -95,13 +88,6 @@ resolver = "2"
[patch] [patch]
[patch.crates-io] [patch.crates-io]
warp = { git = "https://github.com/macladson/warp", rev="7e75acc368229a46a236a8c991bf251fe7fe50ef" } warp = { git = "https://github.com/macladson/warp", rev="7e75acc368229a46a236a8c991bf251fe7fe50ef" }
eth2_ssz = { path = "consensus/ssz" }
eth2_ssz_derive = { path = "consensus/ssz_derive" }
eth2_ssz_types = { path = "consensus/ssz_types" }
eth2_hashing = { path = "crypto/eth2_hashing" }
tree_hash = { path = "consensus/tree_hash" }
tree_hash_derive = { path = "consensus/tree_hash_derive" }
eth2_serde_utils = { path = "consensus/serde_utils" }
arbitrary = { git = "https://github.com/michaelsproul/arbitrary", rev="f002b99989b561ddce62e4cf2887b0f8860ae991" } arbitrary = { git = "https://github.com/michaelsproul/arbitrary", rev="f002b99989b561ddce62e4cf2887b0f8860ae991" }
[patch."https://github.com/ralexstokes/mev-rs"] [patch."https://github.com/ralexstokes/mev-rs"]

View File

@ -30,12 +30,12 @@ serde_derive = "1.0.116"
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = { version = "2.1.1", features = ["json"] } sloggers = { version = "2.1.1", features = ["json"] }
slot_clock = { path = "../../common/slot_clock" } slot_clock = { path = "../../common/slot_clock" }
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
tree_hash = "0.4.1" tree_hash = "0.5.0"
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
tokio = "1.14.0" tokio = "1.14.0"
tokio-stream = "0.1.3" tokio-stream = "0.1.3"

View File

@ -999,7 +999,7 @@ fn descriptive_db_error(item: &str, error: &StoreError) -> String {
mod test { mod test {
use super::*; use super::*;
use crate::validator_monitor::DEFAULT_INDIVIDUAL_TRACKING_THRESHOLD; use crate::validator_monitor::DEFAULT_INDIVIDUAL_TRACKING_THRESHOLD;
use eth2_hashing::hash; use ethereum_hashing::hash;
use genesis::{ use genesis::{
generate_deterministic_keypairs, interop_genesis_state, DEFAULT_ETH1_BLOCK_HASH, generate_deterministic_keypairs, interop_genesis_state, DEFAULT_ETH1_BLOCK_HASH,
}; };

View File

@ -1,7 +1,7 @@
use crate::metrics; use crate::metrics;
use eth1::{Config as Eth1Config, Eth1Block, Service as HttpService}; use eth1::{Config as Eth1Config, Eth1Block, Service as HttpService};
use eth2::lighthouse::Eth1SyncStatusData; use eth2::lighthouse::Eth1SyncStatusData;
use eth2_hashing::hash; use ethereum_hashing::hash;
use int_to_bytes::int_to_bytes32; use int_to_bytes::int_to_bytes32;
use slog::{debug, error, trace, Logger}; use slog::{debug, error, trace, Logger};
use ssz::{Decode, Encode}; use ssz::{Decode, Encode};

View File

@ -20,9 +20,9 @@ serde = { version = "1.0.116", features = ["derive"] }
hex = "0.4.2" hex = "0.4.2"
types = { path = "../../consensus/types"} types = { path = "../../consensus/types"}
merkle_proof = { path = "../../consensus/merkle_proof"} merkle_proof = { path = "../../consensus/merkle_proof"}
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
tree_hash = "0.4.1" tree_hash = "0.5.0"
parking_lot = "0.12.0" parking_lot = "0.12.0"
slog = "2.5.2" slog = "2.5.2"
superstruct = "0.5.0" superstruct = "0.5.0"

View File

@ -13,7 +13,7 @@ slog = "2.5.2"
futures = "0.3.7" futures = "0.3.7"
sensitive_url = { path = "../../common/sensitive_url" } sensitive_url = { path = "../../common/sensitive_url" }
reqwest = { version = "0.11.0", features = ["json","stream"] } reqwest = { version = "0.11.0", features = ["json","stream"] }
eth2_serde_utils = "0.1.1" ethereum_serde_utils = "0.5.0"
serde_json = "1.0.58" serde_json = "1.0.58"
serde = { version = "1.0.116", features = ["derive"] } serde = { version = "1.0.116", features = ["derive"] }
warp = { version = "0.3.2", features = ["tls"] } warp = { version = "0.3.2", features = ["tls"] }
@ -22,15 +22,15 @@ environment = { path = "../../lighthouse/environment" }
bytes = "1.1.0" bytes = "1.1.0"
task_executor = { path = "../../common/task_executor" } task_executor = { path = "../../common/task_executor" }
hex = "0.4.2" hex = "0.4.2"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
eth2 = { path = "../../common/eth2" } eth2 = { path = "../../common/eth2" }
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
superstruct = "0.6.0" superstruct = "0.6.0"
lru = "0.7.1" lru = "0.7.1"
exit-future = "0.2.0" exit-future = "0.2.0"
tree_hash = "0.4.1" tree_hash = "0.5.0"
tree_hash_derive = { path = "../../consensus/tree_hash_derive"} tree_hash_derive = "0.5.0"
parking_lot = "0.12.0" parking_lot = "0.12.0"
slot_clock = { path = "../../common/slot_clock" } slot_clock = { path = "../../common/slot_clock" }
tempfile = "3.1.0" tempfile = "3.1.0"

View File

@ -127,11 +127,11 @@ pub enum BlockByNumberQuery<'a> {
pub struct ExecutionBlock { pub struct ExecutionBlock {
#[serde(rename = "hash")] #[serde(rename = "hash")]
pub block_hash: ExecutionBlockHash, pub block_hash: ExecutionBlockHash,
#[serde(rename = "number", with = "eth2_serde_utils::u64_hex_be")] #[serde(rename = "number", with = "serde_utils::u64_hex_be")]
pub block_number: u64, pub block_number: u64,
pub parent_hash: ExecutionBlockHash, pub parent_hash: ExecutionBlockHash,
pub total_difficulty: Uint256, pub total_difficulty: Uint256,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub timestamp: u64, pub timestamp: u64,
} }
@ -157,13 +157,13 @@ pub struct ExecutionBlockWithTransactions<T: EthSpec> {
pub logs_bloom: FixedVector<u8, T::BytesPerLogsBloom>, pub logs_bloom: FixedVector<u8, T::BytesPerLogsBloom>,
#[serde(alias = "mixHash")] #[serde(alias = "mixHash")]
pub prev_randao: Hash256, pub prev_randao: Hash256,
#[serde(rename = "number", with = "eth2_serde_utils::u64_hex_be")] #[serde(rename = "number", with = "serde_utils::u64_hex_be")]
pub block_number: u64, pub block_number: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub gas_limit: u64, pub gas_limit: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub gas_used: u64, pub gas_used: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub timestamp: u64, pub timestamp: u64,
#[serde(with = "ssz_types::serde_utils::hex_var_list")] #[serde(with = "ssz_types::serde_utils::hex_var_list")]
pub extra_data: VariableList<u8, T::MaxExtraDataBytes>, pub extra_data: VariableList<u8, T::MaxExtraDataBytes>,

View File

@ -917,7 +917,7 @@ impl HttpJsonRpc {
) -> Result<Vec<Option<ExecutionPayloadBodyV1<E>>>, Error> { ) -> Result<Vec<Option<ExecutionPayloadBodyV1<E>>>, Error> {
#[derive(Serialize)] #[derive(Serialize)]
#[serde(transparent)] #[serde(transparent)]
struct Quantity(#[serde(with = "eth2_serde_utils::u64_hex_be")] u64); struct Quantity(#[serde(with = "serde_utils::u64_hex_be")] u64);
let params = json!([Quantity(start), Quantity(count)]); let params = json!([Quantity(start), Quantity(count)]);
let response: Vec<Option<JsonExecutionPayloadBodyV1<E>>> = self let response: Vec<Option<JsonExecutionPayloadBodyV1<E>>> = self

View File

@ -35,7 +35,7 @@ pub struct JsonResponseBody {
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct TransparentJsonPayloadId(#[serde(with = "eth2_serde_utils::bytes_8_hex")] pub PayloadId); pub struct TransparentJsonPayloadId(#[serde(with = "serde_utils::bytes_8_hex")] pub PayloadId);
impl From<PayloadId> for TransparentJsonPayloadId { impl From<PayloadId> for TransparentJsonPayloadId {
fn from(id: PayloadId) -> Self { fn from(id: PayloadId) -> Self {
@ -56,7 +56,7 @@ pub type JsonPayloadIdRequest = TransparentJsonPayloadId;
#[derive(Debug, PartialEq, Serialize, Deserialize)] #[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct JsonPayloadIdResponse { pub struct JsonPayloadIdResponse {
#[serde(with = "eth2_serde_utils::bytes_8_hex")] #[serde(with = "serde_utils::bytes_8_hex")]
pub payload_id: PayloadId, pub payload_id: PayloadId,
} }
@ -79,17 +79,17 @@ pub struct JsonExecutionPayload<T: EthSpec> {
#[serde(with = "serde_logs_bloom")] #[serde(with = "serde_logs_bloom")]
pub logs_bloom: FixedVector<u8, T::BytesPerLogsBloom>, pub logs_bloom: FixedVector<u8, T::BytesPerLogsBloom>,
pub prev_randao: Hash256, pub prev_randao: Hash256,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub block_number: u64, pub block_number: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub gas_limit: u64, pub gas_limit: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub gas_used: u64, pub gas_used: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub timestamp: u64, pub timestamp: u64,
#[serde(with = "ssz_types::serde_utils::hex_var_list")] #[serde(with = "ssz_types::serde_utils::hex_var_list")]
pub extra_data: VariableList<u8, T::MaxExtraDataBytes>, pub extra_data: VariableList<u8, T::MaxExtraDataBytes>,
#[serde(with = "eth2_serde_utils::u256_hex_be")] #[serde(with = "serde_utils::u256_hex_be")]
pub base_fee_per_gas: Uint256, pub base_fee_per_gas: Uint256,
pub block_hash: ExecutionBlockHash, pub block_hash: ExecutionBlockHash,
#[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")] #[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")]
@ -226,7 +226,7 @@ pub struct JsonGetPayloadResponse<T: EthSpec> {
pub execution_payload: JsonExecutionPayloadV1<T>, pub execution_payload: JsonExecutionPayloadV1<T>,
#[superstruct(only(V2), partial_getter(rename = "execution_payload_v2"))] #[superstruct(only(V2), partial_getter(rename = "execution_payload_v2"))]
pub execution_payload: JsonExecutionPayloadV2<T>, pub execution_payload: JsonExecutionPayloadV2<T>,
#[serde(with = "eth2_serde_utils::u256_hex_be")] #[serde(with = "serde_utils::u256_hex_be")]
pub block_value: Uint256, pub block_value: Uint256,
} }
@ -252,12 +252,12 @@ impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> {
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct JsonWithdrawal { pub struct JsonWithdrawal {
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub index: u64, pub index: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub validator_index: u64, pub validator_index: u64,
pub address: Address, pub address: Address,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub amount: u64, pub amount: u64,
} }
@ -295,7 +295,7 @@ impl From<JsonWithdrawal> for Withdrawal {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub struct JsonPayloadAttributes { pub struct JsonPayloadAttributes {
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub timestamp: u64, pub timestamp: u64,
pub prev_randao: Hash256, pub prev_randao: Hash256,
pub suggested_fee_recipient: Address, pub suggested_fee_recipient: Address,
@ -520,18 +520,18 @@ impl<E: EthSpec> From<JsonExecutionPayloadBodyV1<E>> for ExecutionPayloadBodyV1<
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct TransitionConfigurationV1 { pub struct TransitionConfigurationV1 {
#[serde(with = "eth2_serde_utils::u256_hex_be")] #[serde(with = "serde_utils::u256_hex_be")]
pub terminal_total_difficulty: Uint256, pub terminal_total_difficulty: Uint256,
pub terminal_block_hash: ExecutionBlockHash, pub terminal_block_hash: ExecutionBlockHash,
#[serde(with = "eth2_serde_utils::u64_hex_be")] #[serde(with = "serde_utils::u64_hex_be")]
pub terminal_block_number: u64, pub terminal_block_number: u64,
} }
/// Serializes the `logs_bloom` field of an `ExecutionPayload`. /// Serializes the `logs_bloom` field of an `ExecutionPayload`.
pub mod serde_logs_bloom { pub mod serde_logs_bloom {
use super::*; use super::*;
use eth2_serde_utils::hex::PrefixedHexVisitor;
use serde::{Deserializer, Serializer}; use serde::{Deserializer, Serializer};
use serde_utils::hex::PrefixedHexVisitor;
pub fn serialize<S, U>(bytes: &FixedVector<u8, U>, serializer: S) -> Result<S::Ok, S::Error> pub fn serialize<S, U>(bytes: &FixedVector<u8, U>, serializer: S) -> Result<S::Ok, S::Error>
where where

View File

@ -362,7 +362,7 @@ pub async fn handle_rpc<T: EthSpec>(
ENGINE_GET_PAYLOAD_BODIES_BY_RANGE_V1 => { ENGINE_GET_PAYLOAD_BODIES_BY_RANGE_V1 => {
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(transparent)] #[serde(transparent)]
struct Quantity(#[serde(with = "eth2_serde_utils::u64_hex_be")] pub u64); struct Quantity(#[serde(with = "serde_utils::u64_hex_be")] pub u64);
let start = get_param::<Quantity>(params, 0) let start = get_param::<Quantity>(params, 0)
.map_err(|s| (s, BAD_PARAMS_ERROR_CODE))? .map_err(|s| (s, BAD_PARAMS_ERROR_CODE))?

View File

@ -16,9 +16,9 @@ eth1 = { path = "../eth1"}
rayon = "1.4.1" rayon = "1.4.1"
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
merkle_proof = { path = "../../consensus/merkle_proof" } merkle_proof = { path = "../../consensus/merkle_proof" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
tree_hash = "0.4.1" tree_hash = "0.5.0"
tokio = { version = "1.14.0", features = ["full"] } tokio = { version = "1.14.0", features = ["full"] }
slog = "2.5.2" slog = "2.5.2"
int_to_bytes = { path = "../../consensus/int_to_bytes" } int_to_bytes = { path = "../../consensus/int_to_bytes" }

View File

@ -1,5 +1,5 @@
use crate::common::genesis_deposits; use crate::common::genesis_deposits;
use eth2_hashing::hash; use ethereum_hashing::hash;
use rayon::prelude::*; use rayon::prelude::*;
use ssz::Encode; use ssz::Encode;
use state_processing::initialize_beacon_state_from_eth1; use state_processing::initialize_beacon_state_from_eth1;

View File

@ -24,7 +24,7 @@ lighthouse_metrics = { path = "../../common/lighthouse_metrics" }
lazy_static = "1.4.0" lazy_static = "1.4.0"
warp_utils = { path = "../../common/warp_utils" } warp_utils = { path = "../../common/warp_utils" }
slot_clock = { path = "../../common/slot_clock" } slot_clock = { path = "../../common/slot_clock" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
bs58 = "0.4.0" bs58 = "0.4.0"
futures = "0.3.8" futures = "0.3.8"
execution_layer = {path = "../execution_layer"} execution_layer = {path = "../execution_layer"}
@ -32,11 +32,11 @@ parking_lot = "0.12.0"
safe_arith = {path = "../../consensus/safe_arith"} safe_arith = {path = "../../consensus/safe_arith"}
task_executor = { path = "../../common/task_executor" } task_executor = { path = "../../common/task_executor" }
lru = "0.7.7" lru = "0.7.7"
tree_hash = "0.4.1" tree_hash = "0.5.0"
sysinfo = "0.26.5" sysinfo = "0.26.5"
system_health = { path = "../../common/system_health" } system_health = { path = "../../common/system_health" }
directory = { path = "../../common/directory" } directory = { path = "../../common/directory" }
eth2_serde_utils = "0.1.1" ethereum_serde_utils = "0.5.0"
operation_pool = { path = "../operation_pool" } operation_pool = { path = "../operation_pool" }
sensitive_url = { path = "../../common/sensitive_url" } sensitive_url = { path = "../../common/sensitive_url" }
unused_port = {path = "../../common/unused_port"} unused_port = {path = "../../common/unused_port"}

View File

@ -75,15 +75,15 @@ pub fn get_validator_count<T: BeaconChainTypes>(
#[derive(PartialEq, Serialize, Deserialize)] #[derive(PartialEq, Serialize, Deserialize)]
pub struct ValidatorInfoRequestData { pub struct ValidatorInfoRequestData {
#[serde(with = "eth2_serde_utils::quoted_u64_vec")] #[serde(with = "serde_utils::quoted_u64_vec")]
indices: Vec<u64>, indices: Vec<u64>,
} }
#[derive(PartialEq, Serialize, Deserialize)] #[derive(PartialEq, Serialize, Deserialize)]
pub struct ValidatorInfoValues { pub struct ValidatorInfoValues {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
epoch: u64, epoch: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
total_balance: u64, total_balance: u64,
} }

View File

@ -8,13 +8,13 @@ edition = "2021"
discv5 = { version = "0.2.2", features = ["libp2p"] } discv5 = { version = "0.2.2", features = ["libp2p"] }
unsigned-varint = { version = "0.6.0", features = ["codec"] } unsigned-varint = { version = "0.6.0", features = ["codec"] }
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
serde = { version = "1.0.116", features = ["derive"] } serde = { version = "1.0.116", features = ["derive"] }
serde_derive = "1.0.116" serde_derive = "1.0.116"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.0" ethereum_ssz_derive = "0.5.0"
tree_hash = "0.4.1" tree_hash = "0.5.0"
tree_hash_derive = "0.4.0" tree_hash_derive = "0.5.0"
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
lighthouse_version = { path = "../../common/lighthouse_version" } lighthouse_version = { path = "../../common/lighthouse_version" }
tokio = { version = "1.14.0", features = ["time", "macros"] } tokio = { version = "1.14.0", features = ["time", "macros"] }

View File

@ -21,8 +21,8 @@ types = { path = "../../consensus/types" }
slot_clock = { path = "../../common/slot_clock" } slot_clock = { path = "../../common/slot_clock" }
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
hex = "0.4.2" hex = "0.4.2"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
futures = "0.3.7" futures = "0.3.7"
error-chain = "0.12.4" error-chain = "0.12.4"
tokio = { version = "1.14.0", features = ["full"] } tokio = { version = "1.14.0", features = ["full"] }

View File

@ -12,8 +12,8 @@ lighthouse_metrics = { path = "../../common/lighthouse_metrics" }
parking_lot = "0.12.0" parking_lot = "0.12.0"
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
rayon = "1.5.0" rayon = "1.5.0"
serde = "1.0.116" serde = "1.0.116"
serde_derive = "1.0.116" serde_derive = "1.0.116"

View File

@ -13,8 +13,8 @@ db-key = "0.0.5"
leveldb = { version = "0.8.6", default-features = false } leveldb = { version = "0.8.6", default-features = false }
parking_lot = "0.12.0" parking_lot = "0.12.0"
itertools = "0.10.0" itertools = "0.10.0"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
slog = "2.5.2" slog = "2.5.2"

View File

@ -10,7 +10,7 @@ clap = "2.33.3"
clap_utils = { path = "../common/clap_utils" } clap_utils = { path = "../common/clap_utils" }
lighthouse_network = { path = "../beacon_node/lighthouse_network" } lighthouse_network = { path = "../beacon_node/lighthouse_network" }
types = { path = "../consensus/types" } types = { path = "../consensus/types" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
slog = "2.5.2" slog = "2.5.2"
tokio = "1.14.0" tokio = "1.14.0"
log = "0.4.11" log = "0.4.11"

View File

@ -11,7 +11,7 @@ clap = "2.33.3"
hex = "0.4.2" hex = "0.4.2"
dirs = "3.0.1" dirs = "3.0.1"
eth2_network_config = { path = "../eth2_network_config" } eth2_network_config = { path = "../eth2_network_config" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
ethereum-types = "0.14.1" ethereum-types = "0.14.1"
serde = "1.0.116" serde = "1.0.116"
serde_json = "1.0.59" serde_json = "1.0.59"

View File

@ -14,6 +14,6 @@ hex = "0.4.2"
[dependencies] [dependencies]
types = { path = "../../consensus/types"} types = { path = "../../consensus/types"}
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
tree_hash = "0.4.1" tree_hash = "0.5.0"
ethabi = "16.0.0" ethabi = "16.0.0"

View File

@ -13,15 +13,15 @@ types = { path = "../../consensus/types" }
reqwest = { version = "0.11.0", features = ["json","stream"] } reqwest = { version = "0.11.0", features = ["json","stream"] }
lighthouse_network = { path = "../../beacon_node/lighthouse_network" } lighthouse_network = { path = "../../beacon_node/lighthouse_network" }
proto_array = { path = "../../consensus/proto_array", optional = true } proto_array = { path = "../../consensus/proto_array", optional = true }
eth2_serde_utils = "0.1.1" ethereum_serde_utils = "0.5.0"
eth2_keystore = { path = "../../crypto/eth2_keystore" } eth2_keystore = { path = "../../crypto/eth2_keystore" }
libsecp256k1 = "0.7.0" libsecp256k1 = "0.7.0"
ring = "0.16.19" ring = "0.16.19"
bytes = "1.0.1" bytes = "1.0.1"
account_utils = { path = "../../common/account_utils" } account_utils = { path = "../../common/account_utils" }
sensitive_url = { path = "../../common/sensitive_url" } sensitive_url = { path = "../../common/sensitive_url" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
futures-util = "0.3.8" futures-util = "0.3.8"
futures = "0.3.8" futures = "0.3.8"
store = { path = "../../beacon_node/store", optional = true } store = { path = "../../beacon_node/store", optional = true }

View File

@ -6,32 +6,32 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct IdealAttestationRewards { pub struct IdealAttestationRewards {
// Validator's effective balance in gwei // Validator's effective balance in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub effective_balance: u64, pub effective_balance: u64,
// Ideal attester's reward for head vote in gwei // Ideal attester's reward for head vote in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub head: u64, pub head: u64,
// Ideal attester's reward for target vote in gwei // Ideal attester's reward for target vote in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub target: u64, pub target: u64,
// Ideal attester's reward for source vote in gwei // Ideal attester's reward for source vote in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub source: u64, pub source: u64,
} }
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct TotalAttestationRewards { pub struct TotalAttestationRewards {
// one entry for every validator based on their attestations in the epoch // one entry for every validator based on their attestations in the epoch
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64, pub validator_index: u64,
// attester's reward for head vote in gwei // attester's reward for head vote in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub head: u64, pub head: u64,
// attester's reward for target vote in gwei // attester's reward for target vote in gwei
#[serde(with = "eth2_serde_utils::quoted_i64")] #[serde(with = "serde_utils::quoted_i64")]
pub target: i64, pub target: i64,
// attester's reward for source vote in gwei // attester's reward for source vote in gwei
#[serde(with = "eth2_serde_utils::quoted_i64")] #[serde(with = "serde_utils::quoted_i64")]
pub source: i64, pub source: i64,
// TBD attester's inclusion_delay reward in gwei (phase0 only) // TBD attester's inclusion_delay reward in gwei (phase0 only)
// pub inclusion_delay: u64, // pub inclusion_delay: u64,

View File

@ -5,22 +5,22 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct StandardBlockReward { pub struct StandardBlockReward {
// proposer of the block, the proposer index who receives these rewards // proposer of the block, the proposer index who receives these rewards
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub proposer_index: u64, pub proposer_index: u64,
// total block reward in gwei, // total block reward in gwei,
// equal to attestations + sync_aggregate + proposer_slashings + attester_slashings // equal to attestations + sync_aggregate + proposer_slashings + attester_slashings
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub total: u64, pub total: u64,
// block reward component due to included attestations in gwei // block reward component due to included attestations in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub attestations: u64, pub attestations: u64,
// block reward component due to included sync_aggregate in gwei // block reward component due to included sync_aggregate in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub sync_aggregate: u64, pub sync_aggregate: u64,
// block reward component due to included proposer_slashings in gwei // block reward component due to included proposer_slashings in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub proposer_slashings: u64, pub proposer_slashings: u64,
// block reward component due to included attester_slashings in gwei // block reward component due to included attester_slashings in gwei
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub attester_slashings: u64, pub attester_slashings: u64,
} }

View File

@ -5,9 +5,9 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct SyncCommitteeReward { pub struct SyncCommitteeReward {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64, pub validator_index: u64,
// sync committee reward in gwei for the validator // sync committee reward in gwei for the validator
#[serde(with = "eth2_serde_utils::quoted_i64")] #[serde(with = "serde_utils::quoted_i64")]
pub reward: i64, pub reward: i64,
} }

View File

@ -57,7 +57,7 @@ pub fn parse_pubkey(secret: &str) -> Result<Option<PublicKey>, Error> {
&secret[SECRET_PREFIX.len()..] &secret[SECRET_PREFIX.len()..]
}; };
eth2_serde_utils::hex::decode(secret) serde_utils::hex::decode(secret)
.map_err(|e| Error::InvalidSecret(format!("invalid hex: {:?}", e))) .map_err(|e| Error::InvalidSecret(format!("invalid hex: {:?}", e)))
.and_then(|bytes| { .and_then(|bytes| {
if bytes.len() != PK_LEN { if bytes.len() != PK_LEN {
@ -174,7 +174,7 @@ impl ValidatorClientHttpClient {
let message = let message =
Message::parse_slice(digest(&SHA256, &body).as_ref()).expect("sha256 is 32 bytes"); Message::parse_slice(digest(&SHA256, &body).as_ref()).expect("sha256 is 32 bytes");
eth2_serde_utils::hex::decode(&sig) serde_utils::hex::decode(&sig)
.ok() .ok()
.and_then(|bytes| { .and_then(|bytes| {
let sig = Signature::parse_der(&bytes).ok()?; let sig = Signature::parse_der(&bytes).ok()?;

View File

@ -13,7 +13,7 @@ pub struct GetFeeRecipientResponse {
#[derive(Debug, Deserialize, Serialize, PartialEq)] #[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct GetGasLimitResponse { pub struct GetGasLimitResponse {
pub pubkey: PublicKeyBytes, pub pubkey: PublicKeyBytes,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub gas_limit: u64, pub gas_limit: u64,
} }
@ -45,7 +45,7 @@ pub struct ImportKeystoresRequest {
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct KeystoreJsonStr(#[serde(with = "eth2_serde_utils::json_str")] pub Keystore); pub struct KeystoreJsonStr(#[serde(with = "serde_utils::json_str")] pub Keystore);
impl std::ops::Deref for KeystoreJsonStr { impl std::ops::Deref for KeystoreJsonStr {
type Target = Keystore; type Target = Keystore;
@ -56,7 +56,7 @@ impl std::ops::Deref for KeystoreJsonStr {
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct InterchangeJsonStr(#[serde(with = "eth2_serde_utils::json_str")] pub Interchange); pub struct InterchangeJsonStr(#[serde(with = "serde_utils::json_str")] pub Interchange);
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub struct ImportKeystoresResponse { pub struct ImportKeystoresResponse {
@ -103,7 +103,7 @@ pub struct DeleteKeystoresRequest {
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
pub struct DeleteKeystoresResponse { pub struct DeleteKeystoresResponse {
pub data: Vec<Status<DeleteKeystoreStatus>>, pub data: Vec<Status<DeleteKeystoreStatus>>,
#[serde(with = "eth2_serde_utils::json_str")] #[serde(with = "serde_utils::json_str")]
pub slashing_protection: Interchange, pub slashing_protection: Interchange,
} }

View File

@ -32,14 +32,14 @@ pub struct ValidatorRequest {
#[serde(default)] #[serde(default)]
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub builder_proposals: Option<bool>, pub builder_proposals: Option<bool>,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub deposit_gwei: u64, pub deposit_gwei: u64,
} }
#[derive(Clone, PartialEq, Serialize, Deserialize)] #[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct CreateValidatorsMnemonicRequest { pub struct CreateValidatorsMnemonicRequest {
pub mnemonic: ZeroizeString, pub mnemonic: ZeroizeString,
#[serde(with = "eth2_serde_utils::quoted_u32")] #[serde(with = "serde_utils::quoted_u32")]
pub key_derivation_path_offset: u32, pub key_derivation_path_offset: u32,
pub validators: Vec<ValidatorRequest>, pub validators: Vec<ValidatorRequest>,
} }
@ -62,7 +62,7 @@ pub struct CreatedValidator {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub builder_proposals: Option<bool>, pub builder_proposals: Option<bool>,
pub eth1_deposit_tx_data: String, pub eth1_deposit_tx_data: String,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub deposit_gwei: u64, pub deposit_gwei: u64,
} }
@ -141,7 +141,7 @@ pub struct UpdateFeeRecipientRequest {
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct UpdateGasLimitRequest { pub struct UpdateGasLimitRequest {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub gas_limit: u64, pub gas_limit: u64,
} }

View File

@ -82,10 +82,10 @@ impl std::fmt::Display for EndpointVersion {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct GenesisData { pub struct GenesisData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub genesis_time: u64, pub genesis_time: u64,
pub genesis_validators_root: Hash256, pub genesis_validators_root: Hash256,
#[serde(with = "eth2_serde_utils::bytes_4_hex")] #[serde(with = "serde_utils::bytes_4_hex")]
pub genesis_fork_version: [u8; 4], pub genesis_fork_version: [u8; 4],
} }
@ -316,9 +316,9 @@ impl fmt::Display for ValidatorId {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ValidatorData { pub struct ValidatorData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub index: u64, pub index: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub balance: u64, pub balance: u64,
pub status: ValidatorStatus, pub status: ValidatorStatus,
pub validator: Validator, pub validator: Validator,
@ -326,9 +326,9 @@ pub struct ValidatorData {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ValidatorBalanceData { pub struct ValidatorBalanceData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub index: u64, pub index: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub balance: u64, pub balance: u64,
} }
@ -491,16 +491,16 @@ pub struct ValidatorsQuery {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CommitteeData { pub struct CommitteeData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub index: u64, pub index: u64,
pub slot: Slot, pub slot: Slot,
#[serde(with = "eth2_serde_utils::quoted_u64_vec")] #[serde(with = "serde_utils::quoted_u64_vec")]
pub validators: Vec<u64>, pub validators: Vec<u64>,
} }
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SyncCommitteeByValidatorIndices { pub struct SyncCommitteeByValidatorIndices {
#[serde(with = "eth2_serde_utils::quoted_u64_vec")] #[serde(with = "serde_utils::quoted_u64_vec")]
pub validators: Vec<u64>, pub validators: Vec<u64>,
pub validator_aggregates: Vec<SyncSubcommittee>, pub validator_aggregates: Vec<SyncSubcommittee>,
} }
@ -513,7 +513,7 @@ pub struct RandaoMix {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct SyncSubcommittee { pub struct SyncSubcommittee {
#[serde(with = "eth2_serde_utils::quoted_u64_vec")] #[serde(with = "serde_utils::quoted_u64_vec")]
pub indices: Vec<u64>, pub indices: Vec<u64>,
} }
@ -538,7 +538,7 @@ pub struct BlockHeaderData {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DepositContractData { pub struct DepositContractData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub chain_id: u64, pub chain_id: u64,
pub address: Address, pub address: Address,
} }
@ -562,7 +562,7 @@ pub struct IdentityData {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MetaData { pub struct MetaData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub seq_number: u64, pub seq_number: u64,
pub attnets: String, pub attnets: String,
pub syncnets: String, pub syncnets: String,
@ -649,27 +649,27 @@ pub struct ValidatorBalancesQuery {
#[derive(Clone, Serialize, Deserialize)] #[derive(Clone, Serialize, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct ValidatorIndexData(#[serde(with = "eth2_serde_utils::quoted_u64_vec")] pub Vec<u64>); pub struct ValidatorIndexData(#[serde(with = "serde_utils::quoted_u64_vec")] pub Vec<u64>);
/// Borrowed variant of `ValidatorIndexData`, for serializing/sending. /// Borrowed variant of `ValidatorIndexData`, for serializing/sending.
#[derive(Clone, Copy, Serialize)] #[derive(Clone, Copy, Serialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct ValidatorIndexDataRef<'a>( pub struct ValidatorIndexDataRef<'a>(
#[serde(serialize_with = "eth2_serde_utils::quoted_u64_vec::serialize")] pub &'a [u64], #[serde(serialize_with = "serde_utils::quoted_u64_vec::serialize")] pub &'a [u64],
); );
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AttesterData { pub struct AttesterData {
pub pubkey: PublicKeyBytes, pub pubkey: PublicKeyBytes,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64, pub validator_index: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub committees_at_slot: u64, pub committees_at_slot: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub committee_index: CommitteeIndex, pub committee_index: CommitteeIndex,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub committee_length: u64, pub committee_length: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_committee_index: u64, pub validator_committee_index: u64,
pub slot: Slot, pub slot: Slot,
} }
@ -677,7 +677,7 @@ pub struct AttesterData {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ProposerData { pub struct ProposerData {
pub pubkey: PublicKeyBytes, pub pubkey: PublicKeyBytes,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64, pub validator_index: u64,
pub slot: Slot, pub slot: Slot,
} }
@ -726,11 +726,11 @@ pub struct ValidatorAggregateAttestationQuery {
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct BeaconCommitteeSubscription { pub struct BeaconCommitteeSubscription {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub validator_index: u64, pub validator_index: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub committee_index: u64, pub committee_index: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub committees_at_slot: u64, pub committees_at_slot: u64,
pub slot: Slot, pub slot: Slot,
pub is_aggregator: bool, pub is_aggregator: bool,
@ -851,13 +851,13 @@ impl fmt::Display for PeerDirection {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PeerCount { pub struct PeerCount {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub connected: u64, pub connected: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub connecting: u64, pub connecting: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub disconnected: u64, pub disconnected: u64,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub disconnecting: u64, pub disconnecting: u64,
} }
@ -892,7 +892,7 @@ pub struct SseHead {
#[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] #[derive(PartialEq, Debug, Serialize, Deserialize, Clone)]
pub struct SseChainReorg { pub struct SseChainReorg {
pub slot: Slot, pub slot: Slot,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub depth: u64, pub depth: u64,
pub old_head_block: Hash256, pub old_head_block: Hash256,
pub old_head_state: Hash256, pub old_head_state: Hash256,
@ -925,7 +925,7 @@ pub struct SseLateHead {
#[serde(untagged)] #[serde(untagged)]
pub struct SsePayloadAttributes { pub struct SsePayloadAttributes {
#[superstruct(getter(copy))] #[superstruct(getter(copy))]
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub timestamp: u64, pub timestamp: u64,
#[superstruct(getter(copy))] #[superstruct(getter(copy))]
pub prev_randao: Hash256, pub prev_randao: Hash256,
@ -938,10 +938,10 @@ pub struct SsePayloadAttributes {
#[derive(PartialEq, Debug, Deserialize, Serialize, Clone)] #[derive(PartialEq, Debug, Deserialize, Serialize, Clone)]
pub struct SseExtendedPayloadAttributesGeneric<T> { pub struct SseExtendedPayloadAttributesGeneric<T> {
pub proposal_slot: Slot, pub proposal_slot: Slot,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub proposer_index: u64, pub proposer_index: u64,
pub parent_block_root: Hash256, pub parent_block_root: Hash256,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub parent_block_number: u64, pub parent_block_number: u64,
pub parent_block_hash: ExecutionBlockHash, pub parent_block_hash: ExecutionBlockHash,
pub payload_attributes: T, pub payload_attributes: T,
@ -1205,13 +1205,13 @@ fn parse_accept(accept: &str) -> Result<Vec<Mime>, String> {
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct LivenessRequestData { pub struct LivenessRequestData {
pub epoch: Epoch, pub epoch: Epoch,
#[serde(with = "eth2_serde_utils::quoted_u64_vec")] #[serde(with = "serde_utils::quoted_u64_vec")]
pub indices: Vec<u64>, pub indices: Vec<u64>,
} }
#[derive(PartialEq, Debug, Serialize, Deserialize)] #[derive(PartialEq, Debug, Serialize, Deserialize)]
pub struct LivenessResponseData { pub struct LivenessResponseData {
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub index: u64, pub index: u64,
pub epoch: Epoch, pub epoch: Epoch,
pub is_live: bool, pub is_live: bool,
@ -1231,7 +1231,7 @@ pub struct ForkChoiceNode {
pub parent_root: Option<Hash256>, pub parent_root: Option<Hash256>,
pub justified_epoch: Option<Epoch>, pub justified_epoch: Option<Epoch>,
pub finalized_epoch: Option<Epoch>, pub finalized_epoch: Option<Epoch>,
#[serde(with = "eth2_serde_utils::quoted_u64")] #[serde(with = "serde_utils::quoted_u64")]
pub weight: u64, pub weight: u64,
pub validity: Option<String>, pub validity: Option<String>,
pub execution_block_hash: Option<Hash256>, pub execution_block_hash: Option<Hash256>,

View File

@ -9,7 +9,7 @@ edition = "2021"
[dependencies] [dependencies]
lazy_static = "1.4.0" lazy_static = "1.4.0"
num-bigint = "0.4.2" num-bigint = "0.4.2"
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
hex = "0.4.2" hex = "0.4.2"
serde_yaml = "0.8.13" serde_yaml = "0.8.13"
serde = "1.0.116" serde = "1.0.116"

View File

@ -20,7 +20,7 @@
extern crate lazy_static; extern crate lazy_static;
use bls::{Keypair, PublicKey, SecretKey}; use bls::{Keypair, PublicKey, SecretKey};
use eth2_hashing::hash; use ethereum_hashing::hash;
use num_bigint::BigUint; use num_bigint::BigUint;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use std::convert::TryInto; use std::convert::TryInto;

View File

@ -16,6 +16,6 @@ tempfile = "3.1.0"
[dependencies] [dependencies]
serde_yaml = "0.8.13" serde_yaml = "0.8.13"
types = { path = "../../consensus/types"} types = { path = "../../consensus/types"}
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_config = { path = "../eth2_config"} eth2_config = { path = "../eth2_config"}
discv5 = "0.2.2" discv5 = "0.2.2"

View File

@ -16,7 +16,7 @@ filesystem = { path = "../filesystem" }
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
rand = "0.8.5" rand = "0.8.5"
deposit_contract = { path = "../deposit_contract" } deposit_contract = { path = "../deposit_contract" }
tree_hash = "0.4.1" tree_hash = "0.5.0"
hex = "0.4.2" hex = "0.4.2"
derivative = "2.1.1" derivative = "2.1.1"
lockfile = { path = "../lockfile" } lockfile = { path = "../lockfile" }

View File

@ -6,11 +6,11 @@ edition = "2021"
[dependencies] [dependencies]
ethereum-types = "0.14.1" ethereum-types = "0.14.1"
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
tree_hash = "0.4.1" tree_hash = "0.5.0"
smallvec = "1.6.1" smallvec = "1.6.1"
[dev-dependencies] [dev-dependencies]

View File

@ -1,7 +1,7 @@
use crate::cache_arena; use crate::cache_arena;
use crate::SmallVec8; use crate::SmallVec8;
use crate::{Error, Hash256}; use crate::{Error, Hash256};
use eth2_hashing::{hash32_concat, ZERO_HASHES}; use ethereum_hashing::{hash32_concat, ZERO_HASHES};
use smallvec::smallvec; use smallvec::smallvec;
use ssz_derive::{Decode, Encode}; use ssz_derive::{Decode, Encode};
use tree_hash::BYTES_PER_CHUNK; use tree_hash::BYTES_PER_CHUNK;

View File

@ -1,6 +1,6 @@
use crate::impls::hash256_iter; use crate::impls::hash256_iter;
use crate::{CacheArena, CachedTreeHash, Error, Hash256, TreeHashCache}; use crate::{CacheArena, CachedTreeHash, Error, Hash256, TreeHashCache};
use eth2_hashing::ZERO_HASHES; use ethereum_hashing::ZERO_HASHES;
use quickcheck_macros::quickcheck; use quickcheck_macros::quickcheck;
use ssz_types::{ use ssz_types::{
typenum::{Unsigned, U16, U255, U256, U257}, typenum::{Unsigned, U16, U255, U256, U257},

View File

@ -10,8 +10,8 @@ edition = "2021"
types = { path = "../types" } types = { path = "../types" }
state_processing = { path = "../state_processing" } state_processing = { path = "../state_processing" }
proto_array = { path = "../proto_array" } proto_array = { path = "../proto_array" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
slog = { version = "2.5.2", features = ["max_level_trace", "release_max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace", "release_max_level_trace"] }
[dev-dependencies] [dev-dependencies]

View File

@ -6,7 +6,7 @@ edition = "2021"
[dependencies] [dependencies]
ethereum-types = "0.14.1" ethereum-types = "0.14.1"
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
lazy_static = "1.4.0" lazy_static = "1.4.0"
safe_arith = { path = "../safe_arith" } safe_arith = { path = "../safe_arith" }

View File

@ -1,4 +1,4 @@
use eth2_hashing::{hash, hash32_concat, ZERO_HASHES}; use ethereum_hashing::{hash, hash32_concat, ZERO_HASHES};
use ethereum_types::H256; use ethereum_types::H256;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use safe_arith::ArithError; use safe_arith::ArithError;

View File

@ -10,8 +10,8 @@ path = "src/bin.rs"
[dependencies] [dependencies]
types = { path = "../types" } types = { path = "../types" }
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
serde = "1.0.116" serde = "1.0.116"
serde_derive = "1.0.116" serde_derive = "1.0.116"
serde_yaml = "0.8.13" serde_yaml = "0.8.13"

View File

@ -1,14 +0,0 @@
[package]
name = "eth2_serde_utils"
version = "0.1.1"
authors = ["Paul Hauner <paul@paulhauner.com>", "Michael Sproul <michael@sigmaprime.io>"]
edition = "2021"
description = "Serialization and deserialization utilities useful for JSON representations of Ethereum 2.0 types."
license = "Apache-2.0"
[dependencies]
serde = { version = "1.0.116", features = ["derive"] }
serde_derive = "1.0.116"
serde_json = "1.0.58"
hex = "0.4.2"
ethereum-types = "0.14.1"

View File

@ -1,52 +0,0 @@
//! Formats `[u8; n]` as a 0x-prefixed hex string.
//!
//! E.g., `[0, 1, 2, 3]` serializes as `"0x00010203"`.
use crate::hex::PrefixedHexVisitor;
use serde::de::Error;
use serde::{Deserializer, Serializer};
// Expands into a serde `serialize`/`deserialize` pair for a `[u8; $num_bytes]`
// array. Intended to be invoked inside a `pub mod` so each array size gets its
// own module (see `bytes_4_hex`/`bytes_8_hex` below).
macro_rules! bytes_hex {
    ($num_bytes: tt) => {
        use super::*;
        const BYTES_LEN: usize = $num_bytes;
        /// Serialize the array as a 0x-prefixed hex string.
        pub fn serialize<S>(bytes: &[u8; BYTES_LEN], serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            let mut hex_string: String = "0x".to_string();
            hex_string.push_str(&hex::encode(&bytes));
            serializer.serialize_str(&hex_string)
        }
        /// Deserialize a 0x-prefixed hex string into exactly `BYTES_LEN` bytes.
        pub fn deserialize<'de, D>(deserializer: D) -> Result<[u8; BYTES_LEN], D::Error>
        where
            D: Deserializer<'de>,
        {
            let decoded = deserializer.deserialize_str(PrefixedHexVisitor)?;
            // Reject input that does not decode to the exact array length.
            if decoded.len() != BYTES_LEN {
                return Err(D::Error::custom(format!(
                    "expected {} bytes for array, got {}",
                    BYTES_LEN,
                    decoded.len()
                )));
            }
            let mut array = [0; BYTES_LEN];
            array.copy_from_slice(&decoded);
            Ok(array)
        }
    };
}
pub mod bytes_4_hex {
    bytes_hex!(4);
}
pub mod bytes_8_hex {
    bytes_hex!(8);
}

View File

@ -1,77 +0,0 @@
//! Provides utilities for parsing 0x-prefixed hex strings.
use serde::de::{self, Visitor};
use std::fmt;
/// Encode `data` as a 0x-prefixed hex string.
pub fn encode<T: AsRef<[u8]>>(data: T) -> String {
let hex = hex::encode(data);
let mut s = "0x".to_string();
s.push_str(hex.as_str());
s
}
/// Decode `data` from a 0x-prefixed hex string.
pub fn decode(s: &str) -> Result<Vec<u8>, String> {
if let Some(stripped) = s.strip_prefix("0x") {
hex::decode(stripped).map_err(|e| format!("invalid hex: {:?}", e))
} else {
Err("hex must have 0x prefix".to_string())
}
}
/// Serde visitor that decodes a hex string into its raw bytes, requiring the
/// "0x" prefix (delegates to `decode`).
pub struct PrefixedHexVisitor;
impl<'de> Visitor<'de> for PrefixedHexVisitor {
    type Value = Vec<u8>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a hex string with 0x prefix")
    }
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        decode(value).map_err(de::Error::custom)
    }
}
/// Serde visitor that decodes a hex string whether or not it carries a "0x" prefix.
pub struct HexVisitor;
impl<'de> Visitor<'de> for HexVisitor {
    type Value = Vec<u8>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a hex string (irrelevant of prefix)")
    }
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        // NOTE(review): `trim_start_matches` strips *repeated* leading "0x"
        // occurrences, so e.g. "0x0x01" is accepted as "01" — confirm intended.
        hex::decode(value.trim_start_matches("0x"))
            .map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
    }
}
// Sanity-checks `encode` on multi-byte, empty and odd-value inputs.
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn encoding() {
        let bytes = vec![0, 255];
        let hex = encode(bytes);
        assert_eq!(hex.as_str(), "0x00ff");
        // The empty input encodes to just the prefix.
        let bytes = vec![];
        let hex = encode(bytes);
        assert_eq!(hex.as_str(), "0x");
        let bytes = vec![1, 2, 3];
        let hex = encode(bytes);
        assert_eq!(hex.as_str(), "0x010203");
    }
}

View File

@ -1,23 +0,0 @@
//! Formats `Vec<u8>` as a 0x-prefixed hex string.
//!
//! E.g., `vec![0, 1, 2, 3]` serializes as `"0x00010203"`.
use crate::hex::PrefixedHexVisitor;
use serde::{Deserializer, Serializer};
/// Serialize the byte slice as a 0x-prefixed hex string.
pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_str(&format!("0x{}", hex::encode(bytes)))
}
/// Deserialize a `Vec<u8>` from a 0x-prefixed hex string (prefix required).
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
where
    D: Deserializer<'de>,
{
    deserializer.deserialize_str(PrefixedHexVisitor)
}

View File

@ -1,25 +0,0 @@
//! Serialize a datatype as a JSON-blob within a single string.
use serde::{
de::{DeserializeOwned, Error as _},
ser::Error as _,
Deserialize, Deserializer, Serialize, Serializer,
};
/// Serialize as a JSON object within a string.
pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: Serialize,
{
serializer.serialize_str(&serde_json::to_string(value).map_err(S::Error::custom)?)
}
/// Deserialize a JSON object embedded in a string.
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
D: Deserializer<'de>,
T: DeserializeOwned,
{
let json_str = String::deserialize(deserializer)?;
serde_json::from_str(&json_str).map_err(D::Error::custom)
}

View File

@ -1,15 +0,0 @@
// Quoted-integer machinery; the public `quoted_*` modules are re-exported below.
mod quoted_int;
// Hex encodings for fixed-size byte arrays.
pub mod fixed_bytes_hex;
// General 0x-prefixed hex helpers and serde visitors.
pub mod hex;
pub mod hex_vec;
// A datatype embedded as a JSON blob inside a single string.
pub mod json_str;
pub mod list_of_bytes_lists;
// Lists of integers whose elements may be quoted or unquoted.
pub mod quoted_u64_vec;
pub mod u256_hex_be;
pub mod u32_hex;
pub mod u64_hex_be;
pub mod u8_hex;
pub use fixed_bytes_hex::{bytes_4_hex, bytes_8_hex};
pub use quoted_int::{quoted_i64, quoted_u256, quoted_u32, quoted_u64, quoted_u8};

View File

@ -1,49 +0,0 @@
//! Formats `Vec<Vec<u8>>` as a list of 0x-prefixed hex strings.
//!
//! E.g., `vec![vec![0, 1], vec![2, 3]]` serializes as `["0x0001", "0x0203"]`.
//!
//! The 0x prefix is required during decoding.
use crate::hex;
use serde::ser::SerializeSeq;
use serde::{de, Deserializer, Serializer};
/// Serde visitor that decodes a JSON sequence of 0x-prefixed hex strings into
/// `Vec<Vec<u8>>`.
pub struct ListOfBytesListVisitor;
impl<'a> serde::de::Visitor<'a> for ListOfBytesListVisitor {
    type Value = Vec<Vec<u8>>;
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a list of 0x-prefixed byte lists")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'a>,
    {
        let mut vec = vec![];
        // Each element must be a string; `crate::hex::decode` enforces the 0x prefix.
        while let Some(val) = seq.next_element::<String>()? {
            vec.push(hex::decode(&val).map_err(de::Error::custom)?);
        }
        Ok(vec)
    }
}
/// Serialize each inner byte list as a 0x-prefixed hex string.
pub fn serialize<S>(value: &[Vec<u8>], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut seq = serializer.serialize_seq(Some(value.len()))?;
    value
        .iter()
        .try_for_each(|bytes| seq.serialize_element(&hex::encode(bytes)))?;
    seq.end()
}
/// Deserialize a sequence of 0x-prefixed hex strings into `Vec<Vec<u8>>`.
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Vec<u8>>, D::Error>
where
    D: Deserializer<'de>,
{
    deserializer.deserialize_any(ListOfBytesListVisitor)
}

View File

@ -1,247 +0,0 @@
//! Formats some integer types using quotes.
//!
//! E.g., `1` serializes as `"1"`.
//!
//! Quotes can be optional during decoding.
use ethereum_types::U256;
use serde::{Deserializer, Serializer};
use serde_derive::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::marker::PhantomData;
// Expands into a complete serde helper module for the integer type `$int`:
// quoted (de)serialization functions plus the `MaybeQuoted`/`Quoted` wrapper
// types. Invoked once per integer width below (`quoted_u8`, `quoted_u64`, ...).
macro_rules! define_mod {
    ($int: ty) => {
        /// Serde support for deserializing quoted integers.
        ///
        /// Configurable so that quotes are either required or optional.
        pub struct QuotedIntVisitor<T> {
            require_quotes: bool,
            _phantom: PhantomData<T>,
        }
        impl<'a, T> serde::de::Visitor<'a> for QuotedIntVisitor<T>
        where
            T: From<$int> + Into<$int> + Copy + TryFrom<u64>,
        {
            type Value = T;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                if self.require_quotes {
                    write!(formatter, "a quoted integer")
                } else {
                    write!(formatter, "a quoted or unquoted integer")
                }
            }
            // Quoted path: parse the decimal string.
            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                s.parse::<$int>()
                    .map(T::from)
                    .map_err(serde::de::Error::custom)
            }
            // Unquoted path: only accepted when quotes are optional.
            fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                if self.require_quotes {
                    Err(serde::de::Error::custom(
                        "received unquoted integer when quotes are required",
                    ))
                } else {
                    T::try_from(v).map_err(|_| serde::de::Error::custom("invalid integer"))
                }
            }
        }
        /// Compositional wrapper type that allows quotes or no quotes.
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize)]
        #[serde(transparent)]
        pub struct MaybeQuoted<T>
        where
            T: From<$int> + Into<$int> + Copy + TryFrom<u64>,
        {
            #[serde(with = "self")]
            pub value: T,
        }
        /// Wrapper type for requiring quotes on a `$int`-like type.
        ///
        /// Unlike using `serde(with = "quoted_$int::require_quotes")` this is composable, and can be nested
        /// inside types like `Option`, `Result` and `Vec`.
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize)]
        #[serde(transparent)]
        pub struct Quoted<T>
        where
            T: From<$int> + Into<$int> + Copy + TryFrom<u64>,
        {
            #[serde(with = "require_quotes")]
            pub value: T,
        }
        /// Serialize with quotes.
        pub fn serialize<S, T>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
            T: From<$int> + Into<$int> + Copy,
        {
            let v: $int = (*value).into();
            serializer.serialize_str(&format!("{}", v))
        }
        /// Deserialize with or without quotes.
        pub fn deserialize<'de, D, T>(deserializer: D) -> Result<T, D::Error>
        where
            D: Deserializer<'de>,
            T: From<$int> + Into<$int> + Copy + TryFrom<u64>,
        {
            deserializer.deserialize_any(QuotedIntVisitor {
                require_quotes: false,
                _phantom: PhantomData,
            })
        }
        /// Requires quotes when deserializing.
        ///
        /// Usage: `#[serde(with = "quoted_u64::require_quotes")]`.
        pub mod require_quotes {
            pub use super::serialize;
            use super::*;
            pub fn deserialize<'de, D, T>(deserializer: D) -> Result<T, D::Error>
            where
                D: Deserializer<'de>,
                T: From<$int> + Into<$int> + Copy + TryFrom<u64>,
            {
                deserializer.deserialize_any(QuotedIntVisitor {
                    require_quotes: true,
                    _phantom: PhantomData,
                })
            }
        }
        #[cfg(test)]
        mod test {
            use super::*;
            #[test]
            fn require_quotes() {
                let x = serde_json::from_str::<Quoted<$int>>("\"8\"").unwrap();
                assert_eq!(x.value, 8);
                serde_json::from_str::<Quoted<$int>>("8").unwrap_err();
            }
        }
    };
}
// Per-width instantiations of the `define_mod!` helper module.
pub mod quoted_u8 {
    use super::*;
    define_mod!(u8);
}
pub mod quoted_u32 {
    use super::*;
    define_mod!(u32);
}
pub mod quoted_u64 {
    use super::*;
    define_mod!(u64);
}
pub mod quoted_i64 {
    use super::*;
    define_mod!(i64);
}
// `U256` is handled separately: it always requires quotes (the visitor only
// implements `visit_str`, and `deserialize` uses `deserialize_str`).
pub mod quoted_u256 {
    use super::*;
    struct U256Visitor;
    impl<'de> serde::de::Visitor<'de> for U256Visitor {
        type Value = U256;
        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
            formatter.write_str("a quoted U256 integer")
        }
        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            U256::from_dec_str(v).map_err(serde::de::Error::custom)
        }
    }
    /// Serialize with quotes.
    pub fn serialize<S>(value: &U256, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&format!("{}", value))
    }
    /// Deserialize with quotes.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<U256, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_str(U256Visitor)
    }
}
// Round-trip tests for the `quoted_u256` and `quoted_i64` helpers.
#[cfg(test)]
mod test {
    use super::*;
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    #[serde(transparent)]
    struct WrappedU256(#[serde(with = "quoted_u256")] U256);
    #[test]
    fn u256_with_quotes() {
        assert_eq!(
            &serde_json::to_string(&WrappedU256(U256::one())).unwrap(),
            "\"1\""
        );
        assert_eq!(
            serde_json::from_str::<WrappedU256>("\"1\"").unwrap(),
            WrappedU256(U256::one())
        );
    }
    #[test]
    fn u256_without_quotes() {
        serde_json::from_str::<WrappedU256>("1").unwrap_err();
    }
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    #[serde(transparent)]
    struct WrappedI64(#[serde(with = "quoted_i64")] i64);
    #[test]
    fn negative_i64_with_quotes() {
        assert_eq!(
            serde_json::from_str::<WrappedI64>("\"-200\"").unwrap().0,
            -200
        );
        assert_eq!(
            serde_json::to_string(&WrappedI64(-12_500)).unwrap(),
            "\"-12500\""
        );
    }
    // It would be OK if this worked, but we don't need it to (i64s should always be quoted).
    #[test]
    fn negative_i64_without_quotes() {
        serde_json::from_str::<WrappedI64>("-200").unwrap_err();
    }
}

View File

@ -1,97 +0,0 @@
//! Formats `Vec<u64>` using quotes.
//!
//! E.g., `vec![0, 1, 2]` serializes as `["0", "1", "2"]`.
//!
//! Quotes can be optional during decoding.
use serde::ser::SerializeSeq;
use serde::{Deserializer, Serializer};
use serde_derive::{Deserialize, Serialize};
// Wraps a single element so each list entry goes through `crate::quoted_u64`,
// which accepts both quoted and unquoted integers.
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
pub struct QuotedIntWrapper {
    #[serde(with = "crate::quoted_u64")]
    pub int: u64,
}
/// Serde visitor that decodes a sequence of (possibly quoted) integers into `Vec<u64>`.
pub struct QuotedIntVecVisitor;
impl<'a> serde::de::Visitor<'a> for QuotedIntVecVisitor {
    type Value = Vec<u64>;
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a list of quoted or unquoted integers")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'a>,
    {
        let mut vec = vec![];
        while let Some(val) = seq.next_element()? {
            // Type annotation drives element decoding through `QuotedIntWrapper`.
            let val: QuotedIntWrapper = val;
            vec.push(val.int);
        }
        Ok(vec)
    }
}
/// Serialize each `u64` as a quoted decimal string.
pub fn serialize<S>(value: &[u64], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut seq = serializer.serialize_seq(Some(value.len()))?;
    value
        .iter()
        .try_for_each(|&int| seq.serialize_element(&QuotedIntWrapper { int }))?;
    seq.end()
}
/// Deserialize a list whose elements may be quoted or unquoted integers.
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u64>, D::Error>
where
    D: Deserializer<'de>,
{
    deserializer.deserialize_any(QuotedIntVecVisitor)
}
// Exercises quoted, unquoted, mixed and empty lists, plus the rejection of a
// whole list encoded as one string.
#[cfg(test)]
mod test {
    use super::*;
    #[derive(Debug, Serialize, Deserialize)]
    struct Obj {
        #[serde(with = "crate::quoted_u64_vec")]
        values: Vec<u64>,
    }
    #[test]
    fn quoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", "2", "3", "4"] }"#).unwrap();
        assert_eq!(obj.values, vec![1, 2, 3, 4]);
    }
    #[test]
    fn unquoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [1, 2, 3, 4] }"#).unwrap();
        assert_eq!(obj.values, vec![1, 2, 3, 4]);
    }
    #[test]
    fn mixed_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", 2, "3", "4"] }"#).unwrap();
        assert_eq!(obj.values, vec![1, 2, 3, 4]);
    }
    #[test]
    fn empty_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [] }"#).unwrap();
        assert!(obj.values.is_empty());
    }
    #[test]
    fn whole_list_quoted_err() {
        serde_json::from_str::<Obj>(r#"{ "values": "[1, 2, 3, 4]" }"#).unwrap_err();
    }
}

View File

@ -1,144 +0,0 @@
use ethereum_types::U256;
use serde::de::Visitor;
use serde::{de, Deserializer, Serialize, Serializer};
use std::fmt;
use std::str::FromStr;
/// Serialize a `U256` via its own `Serialize` impl.
// NOTE(review): relies on `ethereum_types::U256`'s serde impl producing the
// 0x-prefixed big-endian hex form that `deserialize` below accepts — confirm
// against the `ethereum-types` crate.
pub fn serialize<S>(num: &U256, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    num.serialize(serializer)
}
/// Serde visitor that validates the *shape* of a 0x-prefixed hex quantity and
/// returns the original string unparsed; numeric parsing happens in `deserialize`.
pub struct U256Visitor;
impl<'de> Visitor<'de> for U256Visitor {
    type Value = String;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a well formatted hex string")
    }
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        if !value.starts_with("0x") {
            return Err(de::Error::custom("must start with 0x"));
        }
        // Safe to slice: the "0x" prefix was just checked.
        let stripped = &value[2..];
        if stripped.is_empty() {
            // "0x" alone is not a valid quantity.
            Err(de::Error::custom(format!(
                "quantity cannot be {:?}",
                stripped
            )))
        } else if stripped == "0" {
            // Exactly "0x0" is the only form allowed to start with '0'.
            Ok(value.to_string())
        } else if stripped.starts_with('0') {
            Err(de::Error::custom("cannot have leading zero"))
        } else {
            Ok(value.to_string())
        }
    }
}
/// Deserialize a `U256` from a canonical 0x-prefixed hex string (no leading zeros).
pub fn deserialize<'de, D>(deserializer: D) -> Result<U256, D::Error>
where
    D: Deserializer<'de>,
{
    let decoded = deserializer.deserialize_string(U256Visitor)?;
    U256::from_str(&decoded).map_err(|e| de::Error::custom(format!("Invalid U256 string: {}", e)))
}
// Round-trip tests covering canonical values, the max value, and rejection of
// empty, zero-padded and unprefixed inputs.
#[cfg(test)]
mod test {
    use ethereum_types::U256;
    use serde::{Deserialize, Serialize};
    use serde_json;
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    #[serde(transparent)]
    struct Wrapper {
        #[serde(with = "super")]
        val: U256,
    }
    #[test]
    fn encoding() {
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 0.into() }).unwrap(),
            "\"0x0\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 1.into() }).unwrap(),
            "\"0x1\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 256.into() }).unwrap(),
            "\"0x100\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 65.into() }).unwrap(),
            "\"0x41\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 1024.into() }).unwrap(),
            "\"0x400\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper {
                val: U256::max_value() - 1
            })
            .unwrap(),
            "\"0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper {
                val: U256::max_value()
            })
            .unwrap(),
            "\"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\""
        );
    }
    #[test]
    fn decoding() {
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x0\"").unwrap(),
            Wrapper { val: 0.into() },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x41\"").unwrap(),
            Wrapper { val: 65.into() },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x400\"").unwrap(),
            Wrapper { val: 1024.into() },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>(
                "\"0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe\""
            )
            .unwrap(),
            Wrapper {
                val: U256::max_value() - 1
            },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>(
                "\"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\""
            )
            .unwrap(),
            Wrapper {
                val: U256::max_value()
            },
        );
        // Malformed inputs: empty quantity, leading zero, missing prefix.
        serde_json::from_str::<Wrapper>("\"0x\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"0x0400\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"400\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"ff\"").unwrap_err();
    }
}

View File

@ -1,21 +0,0 @@
//! Formats `u32` as a 0x-prefixed, little-endian hex string.
//!
//! E.g., `0` serializes as `"0x00000000"`.
use crate::bytes_4_hex;
use serde::{Deserializer, Serializer};
/// Serialize a `u32` as a 0x-prefixed, little-endian, fixed-width hex string.
pub fn serialize<S>(num: &u32, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_str(&format!("0x{}", hex::encode(num.to_le_bytes())))
}
/// Deserialize a `u32` from exactly four little-endian hex bytes.
pub fn deserialize<'de, D>(deserializer: D) -> Result<u32, D::Error>
where
    D: Deserializer<'de>,
{
    Ok(u32::from_le_bytes(bytes_4_hex::deserialize(deserializer)?))
}

View File

@ -1,134 +0,0 @@
//! Formats `u64` as a 0x-prefixed, big-endian hex string.
//!
//! E.g., `0` serializes as `"0x0000000000000000"`.
use serde::de::{self, Error, Visitor};
use serde::{Deserializer, Serializer};
use std::fmt;
// Maximum number of bytes a `u64` quantity may decode to.
const BYTES_LEN: usize = 8;
/// Serde visitor that validates and decodes a 0x-prefixed big-endian hex
/// quantity (no leading zeros, "0x0" special-cased) into raw bytes.
pub struct QuantityVisitor;
impl<'de> Visitor<'de> for QuantityVisitor {
    type Value = Vec<u8>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a hex string")
    }
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        if !value.starts_with("0x") {
            return Err(de::Error::custom("must start with 0x"));
        }
        // NOTE(review): `trim_start_matches` strips repeated "0x" prefixes,
        // so "0x0x41" is treated like "0x41" — confirm intended.
        let stripped = value.trim_start_matches("0x");
        if stripped.is_empty() {
            Err(de::Error::custom(format!(
                "quantity cannot be {}",
                stripped
            )))
        } else if stripped == "0" {
            Ok(vec![0])
        } else if stripped.starts_with('0') {
            Err(de::Error::custom("cannot have leading zero"))
        } else if stripped.len() % 2 != 0 {
            // Odd digit count: left-pad with a single zero nibble before decoding.
            hex::decode(format!("0{}", stripped))
                .map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
        } else {
            hex::decode(stripped).map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
        }
    }
}
/// Serialize a `u64` as a 0x-prefixed, big-endian hex string with no leading zeros.
pub fn serialize<S>(num: &u64, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let full = hex::encode(num.to_be_bytes());
    let stripped = full.trim_start_matches('0');
    // Zero would otherwise strip down to the empty string.
    let digits = if stripped.is_empty() { "0" } else { stripped };
    serializer.serialize_str(&format!("0x{}", digits))
}
/// Deserialize a `u64` from a 0x-prefixed big-endian hex quantity of at most 8 bytes.
pub fn deserialize<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
    D: Deserializer<'de>,
{
    let decoded = deserializer.deserialize_str(QuantityVisitor)?;
    // TODO: this is not strict about byte length like other methods.
    if decoded.len() > BYTES_LEN {
        return Err(D::Error::custom(format!(
            "expected max {} bytes for array, got {}",
            BYTES_LEN,
            decoded.len()
        )));
    }
    // Left-pad the decoded bytes into a fixed 8-byte big-endian array.
    let mut array = [0; BYTES_LEN];
    array[BYTES_LEN - decoded.len()..].copy_from_slice(&decoded);
    Ok(u64::from_be_bytes(array))
}
// Round-trip tests for canonical values, plus rejection of empty, zero-padded
// and unprefixed inputs.
#[cfg(test)]
mod test {
    use serde::{Deserialize, Serialize};
    use serde_json;
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    #[serde(transparent)]
    struct Wrapper {
        #[serde(with = "super")]
        val: u64,
    }
    #[test]
    fn encoding() {
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 0 }).unwrap(),
            "\"0x0\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 1 }).unwrap(),
            "\"0x1\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 256 }).unwrap(),
            "\"0x100\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 65 }).unwrap(),
            "\"0x41\""
        );
        assert_eq!(
            &serde_json::to_string(&Wrapper { val: 1024 }).unwrap(),
            "\"0x400\""
        );
    }
    #[test]
    fn decoding() {
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x0\"").unwrap(),
            Wrapper { val: 0 },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x41\"").unwrap(),
            Wrapper { val: 65 },
        );
        assert_eq!(
            serde_json::from_str::<Wrapper>("\"0x400\"").unwrap(),
            Wrapper { val: 1024 },
        );
        // Malformed inputs: empty quantity, leading zero, missing prefix.
        serde_json::from_str::<Wrapper>("\"0x\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"0x0400\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"400\"").unwrap_err();
        serde_json::from_str::<Wrapper>("\"ff\"").unwrap_err();
    }
}

View File

@ -1,29 +0,0 @@
//! Formats `u8` as a 0x-prefixed hex string.
//!
//! E.g., `0` serializes as `"0x00"`.
use crate::hex::PrefixedHexVisitor;
use serde::de::Error;
use serde::{Deserializer, Serializer};
/// Serialize a `u8` as a 0x-prefixed, two-digit hex string.
pub fn serialize<S>(byte: &u8, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_str(&format!("0x{}", hex::encode([*byte])))
}
/// Deserialize a `u8` from a 0x-prefixed hex string encoding exactly one byte.
pub fn deserialize<'de, D>(deserializer: D) -> Result<u8, D::Error>
where
    D: Deserializer<'de>,
{
    let bytes = deserializer.deserialize_str(PrefixedHexVisitor)?;
    match bytes.as_slice() {
        [byte] => Ok(*byte),
        _ => Err(D::Error::custom(format!(
            "expected 1 byte for u8, got {}",
            bytes.len()
        ))),
    }
}

View File

@ -1,21 +0,0 @@
[package]
name = "eth2_ssz"
version = "0.4.1"
authors = ["Paul Hauner <paul@sigmaprime.io>"]
edition = "2021"
description = "SimpleSerialize (SSZ) as used in Ethereum 2.0"
license = "Apache-2.0"
[lib]
name = "ssz"
[dev-dependencies]
eth2_ssz_derive = "0.3.1"
[dependencies]
ethereum-types = "0.14.1"
smallvec = { version = "1.6.1", features = ["const_generics"] }
itertools = "0.10.3"
[features]
arbitrary = ["ethereum-types/arbitrary"]

View File

@ -1,3 +0,0 @@
# simpleserialize (ssz)
[<img src="https://img.shields.io/crates/v/eth2_ssz">](https://crates.io/crates/eth2_ssz)

View File

@ -1,15 +0,0 @@
//! Encode and decode a list many times.
//!
//! Useful for `cargo flamegraph`.
use ssz::{Decode, Encode};
fn main() {
    // A list of 8196 u64s, SSZ-encoded and decoded 40,000 times.
    let list: Vec<u64> = vec![4242; 8196];
    let decoded: Vec<Vec<u64>> = (0..40_000)
        .map(|_| Vec::from_ssz_bytes(&list.as_ssz_bytes()).unwrap())
        .collect();
    // Use the result so the work is observable.
    println!("{}", decoded.len());
}

View File

@ -1,31 +0,0 @@
//! Encode and decode a list many times.
//!
//! Useful for `cargo flamegraph`.
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
// A fixed-length container: four `u64` fields, 32 bytes when SSZ-encoded.
#[derive(Clone, Copy, Encode, Decode)]
pub struct FixedLen {
    a: u64,
    b: u64,
    c: u64,
    d: u64,
}
fn main() {
    let fixed_len = FixedLen {
        a: 42,
        b: 42,
        c: 42,
        d: 42,
    };
    let vec: Vec<FixedLen> = vec![fixed_len; 8196];
    // NOTE(review): the round-trip decodes the bytes back as `Vec<u64>` rather
    // than `Vec<FixedLen>` (each struct is four u64s) — confirm this is intended.
    let output: Vec<Vec<u64>> = (0..40_000)
        .map(|_| Vec::from_ssz_bytes(&vec.as_ssz_bytes()).unwrap())
        .collect();
    println!("{}", output.len());
}

View File

@ -1,73 +0,0 @@
use ssz::{Decode, DecodeError, Encode, SszDecoderBuilder, SszEncoder};
/// Example container with a variable-length middle field, implementing
/// `Encode`/`Decode` by hand.
#[derive(Debug, PartialEq)]
pub struct Foo {
    a: u16,
    b: Vec<u8>,
    c: u16,
}
impl Encode for Foo {
    // Fixed-length only if every field is fixed-length; `b` is a `Vec<u8>`
    // (variable-length), so this is always `false`.
    // Fix: the original referenced `Vec<u16>` although the field is `Vec<u8>`;
    // behavior is identical (any `Vec` is variable-length) but the type now
    // matches the struct definition.
    fn is_ssz_fixed_len() -> bool {
        <u16 as Encode>::is_ssz_fixed_len() && <Vec<u8> as Encode>::is_ssz_fixed_len()
    }
    fn ssz_bytes_len(&self) -> usize {
        <u16 as Encode>::ssz_fixed_len()
            + ssz::BYTES_PER_LENGTH_OFFSET
            + <u16 as Encode>::ssz_fixed_len()
            + self.b.ssz_bytes_len()
    }
    fn ssz_append(&self, buf: &mut Vec<u8>) {
        // Size of the fixed-length portion: `a`, the offset standing in for
        // `b`, and `c`.
        let offset = <u16 as Encode>::ssz_fixed_len()
            + <Vec<u8> as Encode>::ssz_fixed_len()
            + <u16 as Encode>::ssz_fixed_len();
        let mut encoder = SszEncoder::container(buf, offset);
        encoder.append(&self.a);
        encoder.append(&self.b);
        encoder.append(&self.c);
        encoder.finalize();
    }
}
impl Decode for Foo {
    fn is_ssz_fixed_len() -> bool {
        <u16 as Decode>::is_ssz_fixed_len() && <Vec<u8> as Decode>::is_ssz_fixed_len()
    }
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let mut builder = SszDecoderBuilder::new(bytes);
        // Register fields in declaration order.
        builder.register_type::<u16>()?;
        builder.register_type::<Vec<u8>>()?;
        builder.register_type::<u16>()?;
        let mut decoder = builder.build()?;
        Ok(Self {
            a: decoder.decode_next()?,
            b: decoder.decode_next()?,
            c: decoder.decode_next()?,
        })
    }
}
fn main() {
    let my_foo = Foo {
        a: 42,
        b: vec![0, 1, 2, 3],
        c: 11,
    };
    // 42u16 LE | 4-byte offset (8) | 11u16 LE | the four variable bytes of `b`.
    let bytes = vec![42, 0, 8, 0, 0, 0, 11, 0, 0, 1, 2, 3];
    assert_eq!(my_foo.as_ssz_bytes(), bytes);
    let decoded_foo = Foo::from_ssz_bytes(&bytes).unwrap();
    assert_eq!(my_foo, decoded_foo);
}

View File

@ -1,374 +0,0 @@
use super::*;
use smallvec::{smallvec, SmallVec};
use std::cmp::Ordering;
// Alias for a `SmallVec` with an inline capacity of 8 elements.
type SmallVec8<T> = SmallVec<[T; 8]>;
pub mod impls;
pub mod try_from_iter;
/// Returned when SSZ decoding fails.
#[derive(Debug, PartialEq, Clone)]
pub enum DecodeError {
    /// The bytes supplied were too short to be decoded into the specified type.
    InvalidByteLength { len: usize, expected: usize },
    /// The given bytes were too short to be read as a length prefix.
    InvalidLengthPrefix { len: usize, expected: usize },
    /// A length offset pointed to a byte that was out-of-bounds (OOB).
    ///
    /// A byte may be OOB for the following reasons:
    ///
    /// - It is `>= bytes.len()`.
    /// - When decoding variable length items, the 1st offset points "backwards" into the fixed
    /// length items (i.e., `length[0] < BYTES_PER_LENGTH_OFFSET`).
    /// - When decoding variable-length items, the `n`'th offset was less than the `n-1`'th offset.
    OutOfBoundsByte { i: usize },
    /// An offset points “backwards” into the fixed-bytes portion of the message, essentially
    /// double-decoding bytes that will also be decoded as fixed-length.
    ///
    /// https://notes.ethereum.org/ruKvDXl6QOW3gnqVYb8ezA?view#1-Offset-into-fixed-portion
    OffsetIntoFixedPortion(usize),
    /// The first offset does not point to the byte that follows the fixed byte portion,
    /// essentially skipping a variable-length byte.
    ///
    /// https://notes.ethereum.org/ruKvDXl6QOW3gnqVYb8ezA?view#2-Skip-first-variable-byte
    OffsetSkipsVariableBytes(usize),
    /// An offset points to bytes prior to the previous offset. Depending on how you look at it,
    /// this either double-decodes bytes or makes the first offset a negative-length.
    ///
    /// https://notes.ethereum.org/ruKvDXl6QOW3gnqVYb8ezA?view#3-Offsets-are-decreasing
    OffsetsAreDecreasing(usize),
    /// An offset references byte indices that do not exist in the source bytes.
    ///
    /// https://notes.ethereum.org/ruKvDXl6QOW3gnqVYb8ezA?view#4-Offsets-are-out-of-bounds
    OffsetOutOfBounds(usize),
    /// A variable-length list does not have a fixed portion that is cleanly divisible by
    /// `BYTES_PER_LENGTH_OFFSET`.
    InvalidListFixedBytesLen(usize),
    /// Some item has a `ssz_fixed_len` of zero. This is illegal.
    ZeroLengthItem,
    /// The given bytes were invalid for some application-level reason.
    BytesInvalid(String),
    /// The given union selector is out of bounds.
    UnionSelectorInvalid(u8),
}
/// Performs checks on the `offset` based upon the other parameters provided.
///
/// ## Detail
///
/// - `offset`: the offset bytes (e.g., result of `read_offset(..)`).
/// - `previous_offset`: unless this is the first offset in the SSZ object, the value of the
/// previously-read offset. Used to ensure offsets are not decreasing.
/// - `num_bytes`: the total number of bytes in the SSZ object. Used to ensure the offset is not
/// out of bounds.
/// - `num_fixed_bytes`: the number of fixed-bytes in the struct, if it is known. Used to ensure
/// that the first offset doesn't skip any variable bytes.
///
/// ## References
///
/// The checks here are derived from this document:
///
/// https://notes.ethereum.org/ruKvDXl6QOW3gnqVYb8ezA?view
pub fn sanitize_offset(
    offset: usize,
    previous_offset: Option<usize>,
    num_bytes: usize,
    num_fixed_bytes: Option<usize>,
) -> Result<usize, DecodeError> {
    // An offset must never point back into the fixed-length portion.
    if num_fixed_bytes.map_or(false, |fixed| offset < fixed) {
        return Err(DecodeError::OffsetIntoFixedPortion(offset));
    }
    // The first offset must land exactly at the end of the fixed-length portion.
    if previous_offset.is_none() && num_fixed_bytes.map_or(false, |fixed| offset != fixed) {
        return Err(DecodeError::OffsetSkipsVariableBytes(offset));
    }
    // Offsets must stay within the message.
    if offset > num_bytes {
        return Err(DecodeError::OffsetOutOfBounds(offset));
    }
    // Offsets must be monotonically non-decreasing.
    if previous_offset.map_or(false, |prev| prev > offset) {
        return Err(DecodeError::OffsetsAreDecreasing(offset));
    }
    Ok(offset)
}
/// Provides SSZ decoding (de-serialization) via the `from_ssz_bytes(&bytes)` method.
///
/// See `examples/` for manual implementations or the crate root for implementations using
/// `#[derive(Decode)]`.
pub trait Decode: Sized {
    /// Returns `true` if this object has a fixed-length.
    ///
    /// I.e., there are no variable length items in this object or any of its contained objects.
    fn is_ssz_fixed_len() -> bool;
    /// The number of bytes this object occupies in the fixed-length portion of the SSZ bytes.
    ///
    /// By default, this is set to `BYTES_PER_LENGTH_OFFSET` which is suitable for variable length
    /// objects, but not fixed-length objects. Fixed-length objects _must_ return a value which
    /// represents their length.
    fn ssz_fixed_len() -> usize {
        BYTES_PER_LENGTH_OFFSET
    }
    /// Attempts to decode `Self` from `bytes`, returning a `DecodeError` on failure.
    ///
    /// The supplied bytes must be the exact length required to decode `Self`, excess bytes will
    /// result in an error.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError>;
}
/// Pairs a sanitized variable-length offset value with the index in
/// `SszDecoderBuilder::items` that its slice will eventually occupy.
#[derive(Copy, Clone, Debug)]
pub struct Offset {
    // Index into the builder's `items` list.
    position: usize,
    // Byte offset read from the fixed-length portion.
    offset: usize,
}
/// Builds an `SszDecoder`.
///
/// The purpose of this struct is to split some SSZ bytes into individual slices. The builder is
/// then converted into a `SszDecoder` which decodes those values into object instances.
///
/// See [`SszDecoder`](struct.SszDecoder.html) for usage examples.
pub struct SszDecoderBuilder<'a> {
    bytes: &'a [u8],
    // One slice per registered item; variable-length entries start empty and
    // are filled in during `finalize`.
    items: SmallVec8<&'a [u8]>,
    // Offsets read for variable-length items, in registration order.
    offsets: SmallVec8<Offset>,
    // Byte index of the next fixed-length item (or offset) within `bytes`.
    items_index: usize,
}
impl<'a> SszDecoderBuilder<'a> {
    /// Instantiate a new builder that should build a `SszDecoder` over the given `bytes` which
    /// are assumed to be the SSZ encoding of some object.
    pub fn new(bytes: &'a [u8]) -> Self {
        Self {
            bytes,
            items: smallvec![],
            offsets: smallvec![],
            items_index: 0,
        }
    }
    /// Registers a variable-length object as the next item in `bytes`, without specifying the
    /// actual type.
    ///
    /// ## Notes
    ///
    /// Use of this function is generally discouraged since it cannot detect if some type changes
    /// from variable to fixed length.
    ///
    /// Use `Self::register_type` wherever possible.
    pub fn register_anonymous_variable_length_item(&mut self) -> Result<(), DecodeError> {
        // Zero-sized stand-in that reports itself variable-length; it is never
        // decoded, only used to drive the offset bookkeeping below.
        struct Anonymous;
        impl Decode for Anonymous {
            fn is_ssz_fixed_len() -> bool {
                false
            }
            fn from_ssz_bytes(_bytes: &[u8]) -> Result<Self, DecodeError> {
                unreachable!("Anonymous should never be decoded")
            }
        }
        self.register_type::<Anonymous>()
    }
    /// Declares that some type `T` is the next item in `bytes`.
    pub fn register_type<T: Decode>(&mut self) -> Result<(), DecodeError> {
        self.register_type_parameterized(T::is_ssz_fixed_len(), T::ssz_fixed_len())
    }
    /// Declares that a type with the given parameters is the next item in `bytes`.
    pub fn register_type_parameterized(
        &mut self,
        is_ssz_fixed_len: bool,
        ssz_fixed_len: usize,
    ) -> Result<(), DecodeError> {
        if is_ssz_fixed_len {
            // Fixed-length item: take its bytes straight from the fixed portion.
            let start = self.items_index;
            self.items_index += ssz_fixed_len;
            let slice =
                self.bytes
                    .get(start..self.items_index)
                    .ok_or(DecodeError::InvalidByteLength {
                        len: self.bytes.len(),
                        expected: self.items_index,
                    })?;
            self.items.push(slice);
        } else {
            // Variable-length item: read and sanitize its offset now; the
            // actual slice is resolved in `finalize`.
            self.offsets.push(Offset {
                position: self.items.len(),
                offset: sanitize_offset(
                    read_offset(&self.bytes[self.items_index..])?,
                    self.offsets.last().map(|o| o.offset),
                    self.bytes.len(),
                    None,
                )?,
            });
            // Push an empty slice into items; it will be replaced later.
            self.items.push(&[]);
            self.items_index += BYTES_PER_LENGTH_OFFSET;
        }
        Ok(())
    }
    // Resolves each variable-length item's slice from the recorded offsets and
    // validates the overall layout.
    fn finalize(&mut self) -> Result<(), DecodeError> {
        if let Some(first_offset) = self.offsets.first().map(|o| o.offset) {
            // Check to ensure the first offset points to the byte immediately following the
            // fixed-length bytes.
            match first_offset.cmp(&self.items_index) {
                Ordering::Less => return Err(DecodeError::OffsetIntoFixedPortion(first_offset)),
                Ordering::Greater => {
                    return Err(DecodeError::OffsetSkipsVariableBytes(first_offset))
                }
                Ordering::Equal => (),
            }
            // Iterate through each pair of offsets, grabbing the slice between each of the offsets.
            for pair in self.offsets.windows(2) {
                let a = pair[0];
                let b = pair[1];
                self.items[a.position] = &self.bytes[a.offset..b.offset];
            }
            // Handle the last offset, pushing a slice from it's start through to the end of
            // `self.bytes`.
            if let Some(last) = self.offsets.last() {
                self.items[last.position] = &self.bytes[last.offset..]
            }
        } else {
            // If the container is fixed-length, ensure there are no excess bytes.
            if self.items_index != self.bytes.len() {
                return Err(DecodeError::InvalidByteLength {
                    len: self.bytes.len(),
                    expected: self.items_index,
                });
            }
        }
        Ok(())
    }
    /// Finalizes the builder, returning a `SszDecoder` that may be used to instantiate objects.
    pub fn build(mut self) -> Result<SszDecoder<'a>, DecodeError> {
        self.finalize()?;
        Ok(SszDecoder { items: self.items })
    }
}
/// Decodes some slices of SSZ into object instances. Should be instantiated using
/// [`SszDecoderBuilder`](struct.SszDecoderBuilder.html).
///
/// ## Example
///
/// ```rust
/// use ssz_derive::{Encode, Decode};
/// use ssz::{Decode, Encode, SszDecoder, SszDecoderBuilder};
///
/// #[derive(PartialEq, Debug, Encode, Decode)]
/// struct Foo {
/// a: u64,
/// b: Vec<u16>,
/// }
///
/// fn ssz_decoding_example() {
/// let foo = Foo {
/// a: 42,
/// b: vec![1, 3, 3, 7]
/// };
///
/// let bytes = foo.as_ssz_bytes();
///
/// let mut builder = SszDecoderBuilder::new(&bytes);
///
/// builder.register_type::<u64>().unwrap();
/// builder.register_type::<Vec<u16>>().unwrap();
///
/// let mut decoder = builder.build().unwrap();
///
/// let decoded_foo = Foo {
/// a: decoder.decode_next().unwrap(),
/// b: decoder.decode_next().unwrap(),
/// };
///
/// assert_eq!(foo, decoded_foo);
/// }
///
/// ```
pub struct SszDecoder<'a> {
    /// One SSZ-encoded slice per item, in the order the items were registered.
    items: SmallVec8<&'a [u8]>,
}
impl<'a> SszDecoder<'a> {
    /// Decodes the next item.
    ///
    /// # Panics
    ///
    /// Panics when attempting to decode more items than actually exist.
    pub fn decode_next<T: Decode>(&mut self) -> Result<T, DecodeError> {
        self.decode_next_with(T::from_ssz_bytes)
    }
    /// Decodes the next item using the provided function.
    pub fn decode_next_with<T, F>(&mut self, f: F) -> Result<T, DecodeError>
    where
        F: FnOnce(&'a [u8]) -> Result<T, DecodeError>,
    {
        // Take the front-most slice (panics if no items remain) and hand it to `f`.
        let slice = self.items.remove(0);
        f(slice)
    }
}
/// Takes `bytes`, assuming it is the encoding for a SSZ union, and returns the union-selector and
/// the body (trailing bytes).
///
/// ## Errors
///
/// Returns an error if:
///
/// - `bytes` is empty.
/// - the union selector is not a valid value (i.e., larger than the maximum number of variants.
pub fn split_union_bytes(bytes: &[u8]) -> Result<(UnionSelector, &[u8]), DecodeError> {
    // The selector is the first byte; everything after it is the union body (which
    // may be empty).
    let (first, body) = bytes
        .split_first()
        .ok_or(DecodeError::OutOfBoundsByte { i: 0 })?;
    let selector = UnionSelector::new(*first)?;
    Ok((selector, body))
}
/// Reads a `BYTES_PER_LENGTH_OFFSET`-byte length from `bytes`, where `bytes.len() >=
/// BYTES_PER_LENGTH_OFFSET`.
pub fn read_offset(bytes: &[u8]) -> Result<usize, DecodeError> {
    match bytes.get(0..BYTES_PER_LENGTH_OFFSET) {
        Some(prefix) => decode_offset(prefix),
        None => Err(DecodeError::InvalidLengthPrefix {
            len: bytes.len(),
            expected: BYTES_PER_LENGTH_OFFSET,
        }),
    }
}
/// Decode bytes as a little-endian usize, returning an `Err` if `bytes.len() !=
/// BYTES_PER_LENGTH_OFFSET`.
fn decode_offset(bytes: &[u8]) -> Result<usize, DecodeError> {
    if bytes.len() == BYTES_PER_LENGTH_OFFSET {
        // Offsets are always 4-byte (u32) little-endian values per the SSZ spec.
        let mut array = [0u8; BYTES_PER_LENGTH_OFFSET];
        array.copy_from_slice(bytes);
        Ok(u32::from_le_bytes(array) as usize)
    } else {
        Err(DecodeError::InvalidLengthPrefix {
            len: bytes.len(),
            expected: BYTES_PER_LENGTH_OFFSET,
        })
    }
}

View File

@ -1,776 +0,0 @@
use super::*;
use crate::decode::try_from_iter::{TryCollect, TryFromIter};
use core::num::NonZeroUsize;
use ethereum_types::{H160, H256, U128, U256};
use itertools::process_results;
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::iter::{self, FromIterator};
use std::sync::Arc;
/// Implements `Decode` for a fixed-width unsigned integer type, reading exactly
/// `$bit_size / 8` little-endian bytes.
macro_rules! impl_decodable_for_uint {
    ($type: ident, $bit_size: expr) => {
        impl Decode for $type {
            fn is_ssz_fixed_len() -> bool {
                true
            }
            fn ssz_fixed_len() -> usize {
                $bit_size / 8
            }
            fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
                let len = bytes.len();
                let expected = <Self as Decode>::ssz_fixed_len();
                if len != expected {
                    Err(DecodeError::InvalidByteLength { len, expected })
                } else {
                    let mut array: [u8; $bit_size / 8] = std::default::Default::default();
                    array.clone_from_slice(bytes);
                    Ok(Self::from_le_bytes(array))
                }
            }
        }
    };
}
impl_decodable_for_uint!(u8, 8);
impl_decodable_for_uint!(u16, 16);
impl_decodable_for_uint!(u32, 32);
impl_decodable_for_uint!(u64, 64);
// `usize` is decoded at the platform's pointer width, so 32-bit and 64-bit targets
// accept different byte lengths.
#[cfg(target_pointer_width = "32")]
impl_decodable_for_uint!(usize, 32);
#[cfg(target_pointer_width = "64")]
impl_decodable_for_uint!(usize, 64);
/// Implements `Decode` for tuples by treating each tuple as an SSZ container whose
/// fields are the tuple elements, decoded via `SszDecoderBuilder`.
macro_rules! impl_decode_for_tuples {
    ($(
        $Tuple:ident {
            $(($idx:tt) -> $T:ident)+
        }
    )+) => {
        $(
            impl<$($T: Decode),+> Decode for ($($T,)+) {
                // A tuple is fixed-length only if every element is fixed-length.
                fn is_ssz_fixed_len() -> bool {
                    $(
                        <$T as Decode>::is_ssz_fixed_len() &&
                    )*
                    true
                }
                fn ssz_fixed_len() -> usize {
                    if <Self as Decode>::is_ssz_fixed_len() {
                        $(
                            <$T as Decode>::ssz_fixed_len() +
                        )*
                        0
                    } else {
                        BYTES_PER_LENGTH_OFFSET
                    }
                }
                fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
                    let mut builder = SszDecoderBuilder::new(bytes);
                    $(
                        builder.register_type::<$T>()?;
                    )*
                    let mut decoder = builder.build()?;
                    Ok(($(
                        decoder.decode_next::<$T>()?,
                    )*
                    ))
                }
            }
        )+
    }
}
// Instantiate the macro for tuples of arity 2 through 12 (mirroring the standard
// library's tuple trait impls).
impl_decode_for_tuples! {
    Tuple2 {
        (0) -> A
        (1) -> B
    }
    Tuple3 {
        (0) -> A
        (1) -> B
        (2) -> C
    }
    Tuple4 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
    }
    Tuple5 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
    }
    Tuple6 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
    }
    Tuple7 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
    }
    Tuple8 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
    }
    Tuple9 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
    }
    Tuple10 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
    }
    Tuple11 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
    }
    Tuple12 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
        (11) -> L
    }
}
impl Decode for bool {
    fn is_ssz_fixed_len() -> bool {
        true
    }
    fn ssz_fixed_len() -> usize {
        1
    }
    /// A bool is a single byte: `0x00` decodes to `false`, `0x01` to `true`; any
    /// other value is rejected.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let expected = <Self as Decode>::ssz_fixed_len();
        if bytes.len() != expected {
            return Err(DecodeError::InvalidByteLength {
                len: bytes.len(),
                expected,
            });
        }
        match bytes[0] {
            0 => Ok(false),
            1 => Ok(true),
            byte => Err(DecodeError::BytesInvalid(format!(
                "Out-of-range for boolean: {}",
                byte
            ))),
        }
    }
}
impl Decode for NonZeroUsize {
    fn is_ssz_fixed_len() -> bool {
        <usize as Decode>::is_ssz_fixed_len()
    }
    fn ssz_fixed_len() -> usize {
        <usize as Decode>::ssz_fixed_len()
    }
    /// Decodes as a `usize`, then rejects zero.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let value = usize::from_ssz_bytes(bytes)?;
        NonZeroUsize::new(value)
            .ok_or_else(|| DecodeError::BytesInvalid("NonZeroUsize cannot be zero.".to_string()))
    }
}
impl<T: Decode> Decode for Option<T> {
    fn is_ssz_fixed_len() -> bool {
        false
    }
    /// `Option<T>` is an SSZ union: selector `0` is `None`, selector `1` wraps an
    /// encoded `T`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let (selector, body) = split_union_bytes(bytes)?;
        let selector: u8 = selector.into();
        if selector == 0 {
            Ok(None)
        } else if selector == 1 {
            T::from_ssz_bytes(body).map(Some)
        } else {
            Err(DecodeError::UnionSelectorInvalid(selector))
        }
    }
}
impl<T: Decode> Decode for Arc<T> {
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }
    fn ssz_fixed_len() -> usize {
        T::ssz_fixed_len()
    }
    /// Decodes the inner `T` and wraps it in a new `Arc`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let inner = T::from_ssz_bytes(bytes)?;
        Ok(Arc::new(inner))
    }
}
impl Decode for H160 {
    fn is_ssz_fixed_len() -> bool {
        true
    }
    fn ssz_fixed_len() -> usize {
        20
    }
    /// Reads exactly 20 bytes into an `H160`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let expected = <Self as Decode>::ssz_fixed_len();
        if bytes.len() == expected {
            Ok(Self::from_slice(bytes))
        } else {
            Err(DecodeError::InvalidByteLength {
                len: bytes.len(),
                expected,
            })
        }
    }
}
impl Decode for H256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }
    fn ssz_fixed_len() -> usize {
        32
    }
    /// Reads exactly 32 bytes into an `H256`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let expected = <Self as Decode>::ssz_fixed_len();
        if bytes.len() == expected {
            Ok(Self::from_slice(bytes))
        } else {
            Err(DecodeError::InvalidByteLength {
                len: bytes.len(),
                expected,
            })
        }
    }
}
impl Decode for U256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }
    fn ssz_fixed_len() -> usize {
        32
    }
    /// Reads exactly 32 little-endian bytes into a `U256`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let expected = <Self as Decode>::ssz_fixed_len();
        if bytes.len() == expected {
            Ok(U256::from_little_endian(bytes))
        } else {
            Err(DecodeError::InvalidByteLength {
                len: bytes.len(),
                expected,
            })
        }
    }
}
impl Decode for U128 {
    fn is_ssz_fixed_len() -> bool {
        true
    }
    fn ssz_fixed_len() -> usize {
        16
    }
    /// Reads exactly 16 little-endian bytes into a `U128`.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        let expected = <Self as Decode>::ssz_fixed_len();
        if bytes.len() == expected {
            Ok(U128::from_little_endian(bytes))
        } else {
            Err(DecodeError::InvalidByteLength {
                len: bytes.len(),
                expected,
            })
        }
    }
}
/// Implements `Decode` for `[u8; $len]` as a fixed-length byte array copied verbatim
/// from the input.
macro_rules! impl_decodable_for_u8_array {
    ($len: expr) => {
        impl Decode for [u8; $len] {
            fn is_ssz_fixed_len() -> bool {
                true
            }
            fn ssz_fixed_len() -> usize {
                $len
            }
            fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
                let len = bytes.len();
                let expected = <Self as Decode>::ssz_fixed_len();
                if len != expected {
                    Err(DecodeError::InvalidByteLength { len, expected })
                } else {
                    let mut array: [u8; $len] = [0; $len];
                    array.copy_from_slice(bytes);
                    Ok(array)
                }
            }
        }
    };
}
// Only these specific array lengths are implemented (4, 32, 48).
impl_decodable_for_u8_array!(4);
impl_decodable_for_u8_array!(32);
impl_decodable_for_u8_array!(48);
/// Implements `Decode` for a `Vec`-like container: fixed-length elements are decoded
/// by chunking, variable-length elements via `decode_list_of_variable_length_items`.
macro_rules! impl_for_vec {
    ($type: ty, $max_len: expr) => {
        impl<T: Decode> Decode for $type {
            fn is_ssz_fixed_len() -> bool {
                false
            }
            fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
                if bytes.is_empty() {
                    Ok(Self::from_iter(iter::empty()))
                } else if T::is_ssz_fixed_len() {
                    bytes
                        .chunks(T::ssz_fixed_len())
                        .map(T::from_ssz_bytes)
                        .collect()
                } else {
                    decode_list_of_variable_length_items(bytes, $max_len)
                }
            }
        }
    };
}
// `None` max length: these containers impose no SSZ list-length limit themselves.
impl_for_vec!(Vec<T>, None);
impl_for_vec!(SmallVec<[T; 1]>, None);
impl_for_vec!(SmallVec<[T; 2]>, None);
impl_for_vec!(SmallVec<[T; 3]>, None);
impl_for_vec!(SmallVec<[T; 4]>, None);
impl_for_vec!(SmallVec<[T; 5]>, None);
impl_for_vec!(SmallVec<[T; 6]>, None);
impl_for_vec!(SmallVec<[T; 7]>, None);
impl_for_vec!(SmallVec<[T; 8]>, None);
impl<K, V> Decode for BTreeMap<K, V>
where
    K: Decode + Ord,
    V: Decode,
{
    fn is_ssz_fixed_len() -> bool {
        false
    }
    /// Decodes as a list of `(K, V)` tuples collected into a map.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        if bytes.is_empty() {
            return Ok(Self::new());
        }
        if <(K, V)>::is_ssz_fixed_len() {
            bytes
                .chunks(<(K, V)>::ssz_fixed_len())
                .map(<(K, V)>::from_ssz_bytes)
                .collect()
        } else {
            decode_list_of_variable_length_items(bytes, None)
        }
    }
}
impl<T> Decode for BTreeSet<T>
where
    T: Decode + Ord,
{
    fn is_ssz_fixed_len() -> bool {
        false
    }
    /// Decodes as a list of `T` collected into a set.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        if bytes.is_empty() {
            return Ok(Self::new());
        }
        if T::is_ssz_fixed_len() {
            bytes
                .chunks(T::ssz_fixed_len())
                .map(T::from_ssz_bytes)
                .collect()
        } else {
            decode_list_of_variable_length_items(bytes, None)
        }
    }
}
/// Decodes `bytes` as if it were a list of variable-length items.
///
/// The `ssz::SszDecoder` can also perform this functionality, however this function is
/// significantly faster as it is optimized to read same-typed items whilst `ssz::SszDecoder`
/// supports reading items of differing types.
pub fn decode_list_of_variable_length_items<T: Decode, Container: TryFromIter<T>>(
    bytes: &[u8],
    max_len: Option<usize>,
) -> Result<Container, DecodeError> {
    if bytes.is_empty() {
        // An empty byte string is the valid encoding of an empty list.
        return Container::try_from_iter(iter::empty()).map_err(|e| {
            DecodeError::BytesInvalid(format!("Error trying to collect empty list: {:?}", e))
        });
    }
    let first_offset = read_offset(bytes)?;
    sanitize_offset(first_offset, None, bytes.len(), Some(first_offset))?;
    // The fixed portion of a variable-length list consists solely of offsets, so the
    // first offset must be a non-zero multiple of `BYTES_PER_LENGTH_OFFSET` and the
    // item count can be derived from it.
    if first_offset % BYTES_PER_LENGTH_OFFSET != 0 || first_offset < BYTES_PER_LENGTH_OFFSET {
        return Err(DecodeError::InvalidListFixedBytesLen(first_offset));
    }
    let num_items = first_offset / BYTES_PER_LENGTH_OFFSET;
    if max_len.map_or(false, |max| num_items > max) {
        return Err(DecodeError::BytesInvalid(format!(
            "Variable length list of {} items exceeds maximum of {:?}",
            num_items, max_len
        )));
    }
    let mut offset = first_offset;
    process_results(
        (1..=num_items).map(|i| {
            // The final item runs through to the end of `bytes`; every other item ends
            // at the next offset read from the fixed portion.
            let slice_option = if i == num_items {
                bytes.get(offset..)
            } else {
                let start = offset;
                let next_offset = read_offset(&bytes[(i * BYTES_PER_LENGTH_OFFSET)..])?;
                offset =
                    sanitize_offset(next_offset, Some(offset), bytes.len(), Some(first_offset))?;
                bytes.get(start..offset)
            };
            let slice = slice_option.ok_or(DecodeError::OutOfBoundsByte { i: offset })?;
            T::from_ssz_bytes(slice)
        }),
        |iter| iter.try_collect(),
    )?
    .map_err(|e| DecodeError::BytesInvalid(format!("Error collecting into container: {:?}", e)))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Note: decoding of valid bytes is generally tested "indirectly" in the `/tests` dir, by
    // encoding then decoding the element.
    #[test]
    fn invalid_u8_array_4() {
        assert_eq!(
            <[u8; 4]>::from_ssz_bytes(&[0; 3]),
            Err(DecodeError::InvalidByteLength {
                len: 3,
                expected: 4
            })
        );
        assert_eq!(
            <[u8; 4]>::from_ssz_bytes(&[0; 5]),
            Err(DecodeError::InvalidByteLength {
                len: 5,
                expected: 4
            })
        );
    }
    #[test]
    fn invalid_bool() {
        assert_eq!(
            bool::from_ssz_bytes(&[0; 2]),
            Err(DecodeError::InvalidByteLength {
                len: 2,
                expected: 1
            })
        );
        assert_eq!(
            bool::from_ssz_bytes(&[]),
            Err(DecodeError::InvalidByteLength {
                len: 0,
                expected: 1
            })
        );
        // Any byte other than 0 or 1 must be rejected.
        if let Err(DecodeError::BytesInvalid(_)) = bool::from_ssz_bytes(&[2]) {
            // Success.
        } else {
            panic!("Did not return error on invalid bool val")
        }
    }
    #[test]
    fn invalid_h256() {
        assert_eq!(
            H256::from_ssz_bytes(&[0; 33]),
            Err(DecodeError::InvalidByteLength {
                len: 33,
                expected: 32
            })
        );
        assert_eq!(
            H256::from_ssz_bytes(&[0; 31]),
            Err(DecodeError::InvalidByteLength {
                len: 31,
                expected: 32
            })
        );
    }
    #[test]
    fn empty_list() {
        let vec: Vec<Vec<u16>> = vec![];
        let bytes = vec.as_ssz_bytes();
        assert!(bytes.is_empty());
        assert_eq!(Vec::from_ssz_bytes(&bytes), Ok(vec),);
    }
    // The first offset of a variable-length list must point past the offsets
    // themselves (i.e. be >= BYTES_PER_LENGTH_OFFSET).
    #[test]
    fn first_length_points_backwards() {
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[0, 0, 0, 0]),
            Err(DecodeError::InvalidListFixedBytesLen(0))
        );
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[1, 0, 0, 0]),
            Err(DecodeError::InvalidListFixedBytesLen(1))
        );
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[2, 0, 0, 0]),
            Err(DecodeError::InvalidListFixedBytesLen(2))
        );
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[3, 0, 0, 0]),
            Err(DecodeError::InvalidListFixedBytesLen(3))
        );
    }
    #[test]
    fn lengths_are_decreasing() {
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[12, 0, 0, 0, 14, 0, 0, 0, 12, 0, 0, 0, 1, 0, 1, 0]),
            Err(DecodeError::OffsetsAreDecreasing(12))
        );
    }
    #[test]
    fn awkward_fixed_length_portion() {
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[10, 0, 0, 0, 10, 0, 0, 0, 0, 0]),
            Err(DecodeError::InvalidListFixedBytesLen(10))
        );
    }
    #[test]
    fn length_out_of_bounds() {
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[5, 0, 0, 0]),
            Err(DecodeError::OffsetOutOfBounds(5))
        );
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[8, 0, 0, 0, 9, 0, 0, 0]),
            Err(DecodeError::OffsetOutOfBounds(9))
        );
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[8, 0, 0, 0, 16, 0, 0, 0]),
            Err(DecodeError::OffsetOutOfBounds(16))
        );
    }
    #[test]
    fn vec_of_vec_of_u16() {
        assert_eq!(
            <Vec<Vec<u16>>>::from_ssz_bytes(&[4, 0, 0, 0]),
            Ok(vec![vec![]])
        );
        assert_eq!(
            <Vec<u16>>::from_ssz_bytes(&[0, 0, 1, 0, 2, 0, 3, 0]),
            Ok(vec![0, 1, 2, 3])
        );
        assert_eq!(<u16>::from_ssz_bytes(&[16, 0]), Ok(16));
        assert_eq!(<u16>::from_ssz_bytes(&[0, 1]), Ok(256));
        assert_eq!(<u16>::from_ssz_bytes(&[255, 255]), Ok(65535));
        assert_eq!(
            <u16>::from_ssz_bytes(&[255]),
            Err(DecodeError::InvalidByteLength {
                len: 1,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[]),
            Err(DecodeError::InvalidByteLength {
                len: 0,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[0, 1, 2]),
            Err(DecodeError::InvalidByteLength {
                len: 3,
                expected: 2
            })
        );
    }
    #[test]
    fn vec_of_u16() {
        assert_eq!(<Vec<u16>>::from_ssz_bytes(&[0, 0, 0, 0]), Ok(vec![0, 0]));
        assert_eq!(
            <Vec<u16>>::from_ssz_bytes(&[0, 0, 1, 0, 2, 0, 3, 0]),
            Ok(vec![0, 1, 2, 3])
        );
        assert_eq!(<u16>::from_ssz_bytes(&[16, 0]), Ok(16));
        assert_eq!(<u16>::from_ssz_bytes(&[0, 1]), Ok(256));
        assert_eq!(<u16>::from_ssz_bytes(&[255, 255]), Ok(65535));
        assert_eq!(
            <u16>::from_ssz_bytes(&[255]),
            Err(DecodeError::InvalidByteLength {
                len: 1,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[]),
            Err(DecodeError::InvalidByteLength {
                len: 0,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[0, 1, 2]),
            Err(DecodeError::InvalidByteLength {
                len: 3,
                expected: 2
            })
        );
    }
    #[test]
    fn u16() {
        assert_eq!(<u16>::from_ssz_bytes(&[0, 0]), Ok(0));
        assert_eq!(<u16>::from_ssz_bytes(&[16, 0]), Ok(16));
        assert_eq!(<u16>::from_ssz_bytes(&[0, 1]), Ok(256));
        assert_eq!(<u16>::from_ssz_bytes(&[255, 255]), Ok(65535));
        assert_eq!(
            <u16>::from_ssz_bytes(&[255]),
            Err(DecodeError::InvalidByteLength {
                len: 1,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[]),
            Err(DecodeError::InvalidByteLength {
                len: 0,
                expected: 2
            })
        );
        assert_eq!(
            <u16>::from_ssz_bytes(&[0, 1, 2]),
            Err(DecodeError::InvalidByteLength {
                len: 3,
                expected: 2
            })
        );
    }
    #[test]
    fn tuple() {
        assert_eq!(<(u16, u16)>::from_ssz_bytes(&[0, 0, 0, 0]), Ok((0, 0)));
        assert_eq!(<(u16, u16)>::from_ssz_bytes(&[16, 0, 17, 0]), Ok((16, 17)));
        assert_eq!(<(u16, u16)>::from_ssz_bytes(&[0, 1, 2, 0]), Ok((256, 2)));
        assert_eq!(
            <(u16, u16)>::from_ssz_bytes(&[255, 255, 0, 0]),
            Ok((65535, 0))
        );
    }
}

View File

@ -1,103 +0,0 @@
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt::Debug;
/// Partial variant of `std::iter::FromIterator`.
///
/// This trait is implemented for types which can be constructed from an iterator of decoded SSZ
/// values, but which may refuse values once a length limit is reached.
pub trait TryFromIter<T>: Sized {
    /// Reason the collection refused to accept the iterator's items.
    type Error: Debug;
    /// Build `Self` from `iter`, or fail (e.g. if a length limit is exceeded).
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>;
}
// It would be nice to be able to do a blanket impl, e.g.
//
// `impl TryFromIter<T> for C where C: FromIterator<T>`
//
// However this runs into trait coherence issues due to the type parameter `T` on `TryFromIter`.
//
// E.g. If we added an impl downstream for `List<T, N>` then another crate downstream of that
// could legally add an impl of `FromIterator<Local> for List<Local, N>` which would create
// two conflicting implementations for `List<Local, N>`. Hence the `List<T, N>` impl is disallowed
// by the compiler in the presence of the blanket impl. That's obviously annoying, so we opt to
// abandon the blanket impl in favour of impls for selected types.
impl<T> TryFromIter<T> for Vec<T> {
    type Error = Infallible;
    fn try_from_iter<I>(values: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        // Pre-allocate using the iterator's upper size bound, which is parsed from the
        // SSZ input as `num_items` and has already been validated against the type's
        // maximum length in `decode_list_of_variable_length_items`.
        let iterator = values.into_iter();
        let (_, upper_bound) = iterator.size_hint();
        let mut out = Vec::with_capacity(upper_bound.unwrap_or(0));
        for value in iterator {
            out.push(value);
        }
        Ok(out)
    }
}
impl<T, const N: usize> TryFromIter<T> for SmallVec<[T; N]> {
    type Error = Infallible;
    fn try_from_iter<I>(values: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        // A `SmallVec` grows without bound, so collection can never fail.
        let mut out = Self::new();
        out.extend(values);
        Ok(out)
    }
}
impl<K, V> TryFromIter<(K, V)> for BTreeMap<K, V>
where
    K: Ord,
{
    type Error = Infallible;
    fn try_from_iter<I>(entries: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = (K, V)>,
    {
        // Maps have no SSZ length limit here, so plain collection suffices.
        Ok(entries.into_iter().collect())
    }
}
impl<T> TryFromIter<T> for BTreeSet<T>
where
    T: Ord,
{
    type Error = Infallible;
    fn try_from_iter<I>(values: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        // Sets have no SSZ length limit here, so plain collection suffices.
        Ok(values.into_iter().collect())
    }
}
/// Partial variant of `collect`.
pub trait TryCollect: Iterator {
    /// Collect this iterator into `C`, or return `C`'s collection error.
    fn try_collect<C>(self) -> Result<C, C::Error>
    where
        C: TryFromIter<Self::Item>;
}
// Blanket impl: any iterator can `try_collect` into a `TryFromIter` container.
impl<I> TryCollect for I
where
    I: Iterator,
{
    fn try_collect<C>(self) -> Result<C, C::Error>
    where
        C: TryFromIter<Self::Item>,
    {
        C::try_from_iter(self)
    }
}

View File

@ -1,196 +0,0 @@
use super::*;
mod impls;
/// Provides SSZ encoding (serialization) via the `as_ssz_bytes(&self)` method.
///
/// See `examples/` for manual implementations or the crate root for implementations using
/// `#[derive(Encode)]`.
pub trait Encode {
    /// Returns `true` if this object has a fixed-length.
    ///
    /// I.e., there are no variable length items in this object or any of it's contained objects.
    fn is_ssz_fixed_len() -> bool;
    /// Append the encoding `self` to `buf`.
    ///
    /// Note, variable length objects need only to append their "variable length" portion, they do
    /// not need to provide their offset.
    fn ssz_append(&self, buf: &mut Vec<u8>);
    /// The number of bytes this object occupies in the fixed-length portion of the SSZ bytes.
    ///
    /// By default, this is set to `BYTES_PER_LENGTH_OFFSET` which is suitable for variable length
    /// objects, but not fixed-length objects. Fixed-length objects _must_ return a value which
    /// represents their length.
    fn ssz_fixed_len() -> usize {
        BYTES_PER_LENGTH_OFFSET
    }
    /// Returns the size (in bytes) when `self` is serialized.
    ///
    /// Returns the same value as `self.as_ssz_bytes().len()` but this method is significantly more
    /// efficient.
    fn ssz_bytes_len(&self) -> usize;
    /// Returns the full-form encoding of this object.
    ///
    /// The default implementation of this method should suffice for most cases.
    fn as_ssz_bytes(&self) -> Vec<u8> {
        // Delegate to `ssz_append` with a fresh buffer.
        let mut buf = vec![];
        self.ssz_append(&mut buf);
        buf
    }
}
/// Allow for encoding an ordered series of distinct or indistinct objects as SSZ bytes.
///
/// **You must call `finalize(..)` after the final `append(..)` call** to ensure the bytes are
/// written to `buf`.
///
/// ## Example
///
/// Use `SszEncoder` to produce identical output to `foo.as_ssz_bytes()`:
///
/// ```rust
/// use ssz_derive::{Encode, Decode};
/// use ssz::{Decode, Encode, SszEncoder};
///
/// #[derive(PartialEq, Debug, Encode, Decode)]
/// struct Foo {
/// a: u64,
/// b: Vec<u16>,
/// }
///
/// fn ssz_encode_example() {
/// let foo = Foo {
/// a: 42,
/// b: vec![1, 3, 3, 7]
/// };
///
/// let mut buf: Vec<u8> = vec![];
/// let offset = <u64 as Encode>::ssz_fixed_len() + <Vec<u16> as Encode>::ssz_fixed_len();
///
/// let mut encoder = SszEncoder::container(&mut buf, offset);
///
/// encoder.append(&foo.a);
/// encoder.append(&foo.b);
///
/// encoder.finalize();
///
/// assert_eq!(foo.as_ssz_bytes(), buf);
/// }
///
/// ```
pub struct SszEncoder<'a> {
    /// Total length of the fixed-length portion, in bytes; used when computing offsets.
    offset: usize,
    /// Destination buffer; fixed-length bytes and offsets are written here immediately.
    buf: &'a mut Vec<u8>,
    /// Buffered variable-length bytes, appended to `buf` by `finalize`.
    variable_bytes: Vec<u8>,
}
impl<'a> SszEncoder<'a> {
    /// Instantiate a new encoder for encoding a SSZ container.
    pub fn container(buf: &'a mut Vec<u8>, num_fixed_bytes: usize) -> Self {
        buf.reserve(num_fixed_bytes);
        Self {
            offset: num_fixed_bytes,
            buf,
            variable_bytes: vec![],
        }
    }
    /// Append some `item` to the SSZ bytes.
    pub fn append<T: Encode>(&mut self, item: &T) {
        self.append_parameterized(T::is_ssz_fixed_len(), |buf| item.ssz_append(buf))
    }
    /// Uses `ssz_append` to append the encoding of some item to the SSZ bytes.
    pub fn append_parameterized<F>(&mut self, is_ssz_fixed_len: bool, ssz_append: F)
    where
        F: Fn(&mut Vec<u8>),
    {
        if is_ssz_fixed_len {
            // Fixed-length items are written directly into the fixed portion.
            ssz_append(self.buf);
        } else {
            // Variable-length items: write an offset into the fixed portion now and
            // buffer the actual encoding until `finalize` appends it.
            self.buf
                .extend_from_slice(&encode_length(self.offset + self.variable_bytes.len()));
            ssz_append(&mut self.variable_bytes);
        }
    }
    /// Write the variable bytes to `self.bytes`.
    ///
    /// This method must be called after the final `append(..)` call when serializing
    /// variable-length items.
    pub fn finalize(&mut self) -> &mut Vec<u8> {
        self.buf.append(&mut self.variable_bytes);
        self.buf
    }
}
/// Encode `len` as a little-endian byte array of `BYTES_PER_LENGTH_OFFSET` length.
///
/// If `len` is larger than `2 ^ BYTES_PER_LENGTH_OFFSET`, a `debug_assert` is raised.
/// In release builds an oversized `len` is silently truncated to its low
/// `BYTES_PER_LENGTH_OFFSET` bytes.
pub fn encode_length(len: usize) -> [u8; BYTES_PER_LENGTH_OFFSET] {
    // Note: it is possible for `len` to be larger than what can be encoded in
    // `BYTES_PER_LENGTH_OFFSET` bytes, triggering this debug assertion.
    //
    // These are the alternatives to using a `debug_assert` here:
    //
    // 1. Use `assert`.
    // 2. Push an error to the caller (e.g., `Option` or `Result`).
    // 3. Ignore it completely.
    //
    // I have avoided (1) because it's basically a choice between "produce invalid SSZ" or "kill
    // the entire program". I figure it may be possible for an attacker to trigger this assert and
    // take the program down -- I think producing invalid SSZ is a better option than this.
    //
    // I have avoided (2) because this error will need to be propagated upstream, making encoding a
    // function which may fail. I don't think this is ergonomic and the upsides don't outweigh the
    // downsides.
    //
    // I figure a `debug_assertion` is better than (3) as it will give us a change to detect the
    // error during testing.
    //
    // If you have a different opinion, feel free to start an issue and tag @paulhauner.
    debug_assert!(len <= MAX_LENGTH_VALUE);
    let mut bytes = [0; BYTES_PER_LENGTH_OFFSET];
    bytes.copy_from_slice(&len.to_le_bytes()[0..BYTES_PER_LENGTH_OFFSET]);
    bytes
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_encode_length() {
        assert_eq!(encode_length(0), [0; 4]);
        assert_eq!(encode_length(1), [1, 0, 0, 0]);
        assert_eq!(
            encode_length(MAX_LENGTH_VALUE),
            [255; BYTES_PER_LENGTH_OFFSET]
        );
    }
    // In debug builds an oversized length trips the `debug_assert`.
    #[test]
    #[should_panic]
    #[cfg(debug_assertions)]
    fn test_encode_length_above_max_debug_panics() {
        encode_length(MAX_LENGTH_VALUE + 1);
    }
    // In release builds the same input silently truncates to the low 4 bytes.
    #[test]
    #[cfg(not(debug_assertions))]
    fn test_encode_length_above_max_not_debug_does_not_panic() {
        assert_eq!(&encode_length(MAX_LENGTH_VALUE + 1)[..], &[0; 4]);
    }
}

View File

@ -1,633 +0,0 @@
use super::*;
use core::num::NonZeroUsize;
use ethereum_types::{H160, H256, U128, U256};
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::sync::Arc;
/// Implements `Encode` for a fixed-width unsigned integer type, writing
/// `$bit_size / 8` little-endian bytes.
macro_rules! impl_encodable_for_uint {
    ($type: ident, $bit_size: expr) => {
        impl Encode for $type {
            fn is_ssz_fixed_len() -> bool {
                true
            }
            fn ssz_fixed_len() -> usize {
                $bit_size / 8
            }
            fn ssz_bytes_len(&self) -> usize {
                $bit_size / 8
            }
            fn ssz_append(&self, buf: &mut Vec<u8>) {
                buf.extend_from_slice(&self.to_le_bytes());
            }
        }
    };
}
impl_encodable_for_uint!(u8, 8);
impl_encodable_for_uint!(u16, 16);
impl_encodable_for_uint!(u32, 32);
impl_encodable_for_uint!(u64, 64);
// `usize` is encoded at the platform's pointer width, mirroring the decode impls.
#[cfg(target_pointer_width = "32")]
impl_encodable_for_uint!(usize, 32);
#[cfg(target_pointer_width = "64")]
impl_encodable_for_uint!(usize, 64);
// Based on the `tuple_impls` macro from the standard library.
/// Implements `Encode` for tuples by treating each tuple as an SSZ container whose
/// fields are the tuple elements, encoded via `SszEncoder`.
macro_rules! impl_encode_for_tuples {
    ($(
        $Tuple:ident {
            $(($idx:tt) -> $T:ident)+
        }
    )+) => {
        $(
            impl<$($T: Encode),+> Encode for ($($T,)+) {
                // A tuple is fixed-length only if every element is fixed-length.
                fn is_ssz_fixed_len() -> bool {
                    $(
                        <$T as Encode>::is_ssz_fixed_len() &&
                    )*
                    true
                }
                fn ssz_fixed_len() -> usize {
                    if <Self as Encode>::is_ssz_fixed_len() {
                        $(
                            <$T as Encode>::ssz_fixed_len() +
                        )*
                        0
                    } else {
                        BYTES_PER_LENGTH_OFFSET
                    }
                }
                fn ssz_bytes_len(&self) -> usize {
                    if <Self as Encode>::is_ssz_fixed_len() {
                        <Self as Encode>::ssz_fixed_len()
                    } else {
                        let mut len = 0;
                        $(
                            // Each variable-length element contributes its offset
                            // plus its body; fixed-length elements contribute only
                            // their fixed size.
                            len += if <$T as Encode>::is_ssz_fixed_len() {
                                <$T as Encode>::ssz_fixed_len()
                            } else {
                                BYTES_PER_LENGTH_OFFSET +
                                self.$idx.ssz_bytes_len()
                            };
                        )*
                        len
                    }
                }
                fn ssz_append(&self, buf: &mut Vec<u8>) {
                    let offset = $(
                            <$T as Encode>::ssz_fixed_len() +
                        )*
                            0;
                    let mut encoder = SszEncoder::container(buf, offset);
                    $(
                        encoder.append(&self.$idx);
                    )*
                    encoder.finalize();
                }
            }
        )+
    }
}
// Instantiate the macro for tuples of arity 2 through 12 (mirroring the standard
// library's tuple trait impls).
impl_encode_for_tuples! {
    Tuple2 {
        (0) -> A
        (1) -> B
    }
    Tuple3 {
        (0) -> A
        (1) -> B
        (2) -> C
    }
    Tuple4 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
    }
    Tuple5 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
    }
    Tuple6 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
    }
    Tuple7 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
    }
    Tuple8 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
    }
    Tuple9 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
    }
    Tuple10 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
    }
    Tuple11 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
    }
    Tuple12 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
        (11) -> L
    }
}
impl<T: Encode> Encode for Option<T> {
    fn is_ssz_fixed_len() -> bool {
        false
    }
    /// `Option<T>` encodes as an SSZ union: a one-byte selector (`0` for `None`,
    /// `1` for `Some`) followed by the inner encoding, if any.
    fn ssz_append(&self, buf: &mut Vec<u8>) {
        match self {
            None => {
                buf.push(0);
            }
            Some(inner) => {
                buf.push(1);
                inner.ssz_append(buf);
            }
        }
    }
    fn ssz_bytes_len(&self) -> usize {
        match self {
            None => 1,
            Some(inner) => inner
                .ssz_bytes_len()
                .checked_add(1)
                .expect("encoded length must be less than usize::max_value"),
        }
    }
}
impl<T: Encode> Encode for Arc<T> {
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }
    fn ssz_fixed_len() -> usize {
        T::ssz_fixed_len()
    }
    /// An `Arc<T>` encodes identically to the `T` it wraps.
    fn ssz_append(&self, buf: &mut Vec<u8>) {
        (**self).ssz_append(buf)
    }
    fn ssz_bytes_len(&self) -> usize {
        (**self).ssz_bytes_len()
    }
}
// Encode transparently through references.
impl<'a, T: Encode> Encode for &'a T {
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }
    fn ssz_fixed_len() -> usize {
        T::ssz_fixed_len()
    }
    // `T::ssz_append(self, ..)` deref-coerces `&&T` to `&T`, dispatching to the
    // referent's impl rather than recursing into this one.
    fn ssz_append(&self, buf: &mut Vec<u8>) {
        T::ssz_append(self, buf)
    }
    fn ssz_bytes_len(&self) -> usize {
        T::ssz_bytes_len(self)
    }
}
/// Compute the encoded length of a vector-like sequence of `T`.
pub fn sequence_ssz_bytes_len<I, T>(iter: I) -> usize
where
    I: Iterator<Item = T> + ExactSizeIterator,
    T: Encode,
{
    // Capture the item count before consuming the iterator.
    let count = iter.len();
    if <T as Encode>::is_ssz_fixed_len() {
        count * <T as Encode>::ssz_fixed_len()
    } else {
        // Variable-length items: the bodies plus one offset per item.
        let body_len: usize = iter.map(|item| item.ssz_bytes_len()).sum();
        body_len + count * BYTES_PER_LENGTH_OFFSET
    }
}
/// Encode a vector-like sequence of `T`.
pub fn sequence_ssz_append<I, T>(iter: I, buf: &mut Vec<u8>)
where
    I: Iterator<Item = T> + ExactSizeIterator,
    T: Encode,
{
    if T::is_ssz_fixed_len() {
        // Fixed-length items are concatenated back-to-back with no offsets.
        buf.reserve(T::ssz_fixed_len() * iter.len());
        for item in iter {
            item.ssz_append(buf);
        }
    } else {
        // Variable-length items: the fixed portion is one offset per item, followed
        // by the concatenated bodies.
        let mut encoder = SszEncoder::container(buf, iter.len() * BYTES_PER_LENGTH_OFFSET);
        for item in iter {
            encoder.append(&item);
        }
        encoder.finalize();
    }
}
/// Implements `Encode` for a `Vec`-like container by delegating to the shared
/// `sequence_*` helpers.
macro_rules! impl_for_vec {
    ($type: ty) => {
        impl<T: Encode> Encode for $type {
            fn is_ssz_fixed_len() -> bool {
                false
            }
            fn ssz_bytes_len(&self) -> usize {
                sequence_ssz_bytes_len(self.iter())
            }
            fn ssz_append(&self, buf: &mut Vec<u8>) {
                sequence_ssz_append(self.iter(), buf)
            }
        }
    };
}
impl_for_vec!(Vec<T>);
impl_for_vec!(SmallVec<[T; 1]>);
impl_for_vec!(SmallVec<[T; 2]>);
impl_for_vec!(SmallVec<[T; 3]>);
impl_for_vec!(SmallVec<[T; 4]>);
impl_for_vec!(SmallVec<[T; 5]>);
impl_for_vec!(SmallVec<[T; 6]>);
impl_for_vec!(SmallVec<[T; 7]>);
impl_for_vec!(SmallVec<[T; 8]>);
// A map encodes as a sequence of `(K, V)` tuples, in key order.
impl<K, V> Encode for BTreeMap<K, V>
where
    K: Encode + Ord,
    V: Encode,
{
    fn is_ssz_fixed_len() -> bool {
        false
    }
    fn ssz_bytes_len(&self) -> usize {
        sequence_ssz_bytes_len(self.iter())
    }
    fn ssz_append(&self, buf: &mut Vec<u8>) {
        sequence_ssz_append(self.iter(), buf)
    }
}
/// Encode a `BTreeSet` as a variable-length SSZ list of its elements, iterated
/// in sorted order.
impl<T> Encode for BTreeSet<T>
where
    T: Encode + Ord,
{
    fn is_ssz_fixed_len() -> bool {
        false
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        sequence_ssz_append(self.iter(), buf)
    }

    fn ssz_bytes_len(&self) -> usize {
        sequence_ssz_bytes_len(self.iter())
    }
}
/// A `bool` encodes as a single byte: `0x01` for `true`, `0x00` for `false`.
impl Encode for bool {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        1
    }

    fn ssz_bytes_len(&self) -> usize {
        1
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        // `as u8` maps true -> 1, false -> 0.
        buf.push(*self as u8);
    }
}
/// A `NonZeroUsize` encodes exactly like the `usize` it wraps.
impl Encode for NonZeroUsize {
    fn is_ssz_fixed_len() -> bool {
        <usize as Encode>::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        <usize as Encode>::ssz_fixed_len()
    }

    fn ssz_bytes_len(&self) -> usize {
        // Delegate rather than returning `std::mem::size_of::<usize>()`:
        // `usize` is SSZ-encoded with a fixed width (8 bytes, matching the
        // `ssz_encode_usize` test), so `size_of` would disagree with
        // `ssz_fixed_len`/`ssz_append` on 32-bit targets.
        self.get().ssz_bytes_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        self.get().ssz_append(buf)
    }
}
/// An `H160` (20-byte hash, e.g. an execution-layer address) encodes as its
/// raw bytes.
impl Encode for H160 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        20
    }

    fn ssz_bytes_len(&self) -> usize {
        <Self as Encode>::ssz_fixed_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(self.as_bytes());
    }
}
/// An `H256` (32-byte hash) encodes as its raw bytes.
impl Encode for H256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        32
    }

    fn ssz_bytes_len(&self) -> usize {
        <Self as Encode>::ssz_fixed_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(self.as_bytes());
    }
}
/// A `U256` encodes as 32 little-endian bytes.
impl Encode for U256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        32
    }

    fn ssz_bytes_len(&self) -> usize {
        32
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        // Serialise into a stack buffer, then copy into `buf`.
        let mut bytes = [0u8; 32];
        self.to_little_endian(&mut bytes);
        buf.extend_from_slice(&bytes);
    }
}
/// A `U128` encodes as 16 little-endian bytes.
impl Encode for U128 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        16
    }

    fn ssz_bytes_len(&self) -> usize {
        16
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        // Serialise into a stack buffer, then copy into `buf`.
        let mut bytes = [0u8; 16];
        self.to_little_endian(&mut bytes);
        buf.extend_from_slice(&bytes);
    }
}
/// Implement fixed-length `Encode` for a `[u8; $n]` byte array.
macro_rules! impl_encodable_for_u8_array {
    ($n: expr) => {
        impl Encode for [u8; $n] {
            fn is_ssz_fixed_len() -> bool {
                true
            }

            fn ssz_fixed_len() -> usize {
                $n
            }

            fn ssz_bytes_len(&self) -> usize {
                $n
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                // The encoding is simply the raw bytes.
                buf.extend_from_slice(&self[..]);
            }
        }
    };
}
// Byte-array lengths that receive fixed-length `Encode` impls.
impl_encodable_for_u8_array!(4);
impl_encodable_for_u8_array!(32);
impl_encodable_for_u8_array!(48);
#[cfg(test)]
mod tests {
    use super::*;

    // A list of fixed-size elements is the element encodings back-to-back.
    #[test]
    fn vec_of_u8() {
        let vec: Vec<u8> = vec![];
        assert_eq!(vec.as_ssz_bytes(), vec![]);
        let vec: Vec<u8> = vec![1];
        assert_eq!(vec.as_ssz_bytes(), vec![1]);
        let vec: Vec<u8> = vec![0, 1, 2, 3];
        assert_eq!(vec.as_ssz_bytes(), vec![0, 1, 2, 3]);
    }

    // A list of variable-size elements is a 4-byte offset per element followed
    // by the element payloads.
    #[test]
    fn vec_of_vec_of_u8() {
        let vec: Vec<Vec<u8>> = vec![];
        assert_eq!(vec.as_ssz_bytes(), vec![]);
        let vec: Vec<Vec<u8>> = vec![vec![]];
        assert_eq!(vec.as_ssz_bytes(), vec![4, 0, 0, 0]);
        let vec: Vec<Vec<u8>> = vec![vec![], vec![]];
        assert_eq!(vec.as_ssz_bytes(), vec![8, 0, 0, 0, 8, 0, 0, 0]);
        let vec: Vec<Vec<u8>> = vec![vec![0, 1, 2], vec![11, 22, 33]];
        assert_eq!(
            vec.as_ssz_bytes(),
            vec![8, 0, 0, 0, 11, 0, 0, 0, 0, 1, 2, 11, 22, 33]
        );
    }

    #[test]
    fn ssz_encode_u8() {
        assert_eq!(0_u8.as_ssz_bytes(), vec![0]);
        assert_eq!(1_u8.as_ssz_bytes(), vec![1]);
        assert_eq!(100_u8.as_ssz_bytes(), vec![100]);
        assert_eq!(255_u8.as_ssz_bytes(), vec![255]);
    }

    // Integers encode little-endian.
    #[test]
    fn ssz_encode_u16() {
        assert_eq!(1_u16.as_ssz_bytes(), vec![1, 0]);
        assert_eq!(100_u16.as_ssz_bytes(), vec![100, 0]);
        assert_eq!((1_u16 << 8).as_ssz_bytes(), vec![0, 1]);
        assert_eq!(65535_u16.as_ssz_bytes(), vec![255, 255]);
    }

    #[test]
    fn ssz_encode_u32() {
        assert_eq!(1_u32.as_ssz_bytes(), vec![1, 0, 0, 0]);
        assert_eq!(100_u32.as_ssz_bytes(), vec![100, 0, 0, 0]);
        assert_eq!((1_u32 << 16).as_ssz_bytes(), vec![0, 0, 1, 0]);
        assert_eq!((1_u32 << 24).as_ssz_bytes(), vec![0, 0, 0, 1]);
        assert_eq!((!0_u32).as_ssz_bytes(), vec![255, 255, 255, 255]);
    }

    #[test]
    fn ssz_encode_u64() {
        assert_eq!(1_u64.as_ssz_bytes(), vec![1, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(
            (!0_u64).as_ssz_bytes(),
            vec![255, 255, 255, 255, 255, 255, 255, 255]
        );
    }

    // `usize` encodes with a fixed 8-byte width regardless of target pointer
    // width.
    #[test]
    fn ssz_encode_usize() {
        assert_eq!(1_usize.as_ssz_bytes(), vec![1, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(
            (!0_usize).as_ssz_bytes(),
            vec![255, 255, 255, 255, 255, 255, 255, 255]
        );
    }

    // `Option` encodes with a one-byte union selector.
    #[test]
    fn ssz_encode_option_u8() {
        let opt: Option<u8> = None;
        assert_eq!(opt.as_ssz_bytes(), vec![0]);
        let opt: Option<u8> = Some(2);
        assert_eq!(opt.as_ssz_bytes(), vec![1, 2]);
    }

    #[test]
    fn ssz_encode_bool() {
        assert_eq!(true.as_ssz_bytes(), vec![1]);
        assert_eq!(false.as_ssz_bytes(), vec![0]);
    }

    #[test]
    fn ssz_encode_h256() {
        assert_eq!(H256::from(&[0; 32]).as_ssz_bytes(), vec![0; 32]);
        assert_eq!(H256::from(&[1; 32]).as_ssz_bytes(), vec![1; 32]);
        let bytes = vec![
            1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0,
        ];
        assert_eq!(H256::from_slice(&bytes).as_ssz_bytes(), bytes);
    }

    #[test]
    fn ssz_encode_u8_array_4() {
        assert_eq!([0, 0, 0, 0].as_ssz_bytes(), vec![0; 4]);
        assert_eq!([1, 0, 0, 0].as_ssz_bytes(), vec![1, 0, 0, 0]);
        assert_eq!([1, 2, 3, 4].as_ssz_bytes(), vec![1, 2, 3, 4]);
    }

    // Tuples of fixed-size elements concatenate their encodings.
    #[test]
    fn tuple() {
        assert_eq!((10u8, 11u8).as_ssz_bytes(), vec![10, 11]);
        assert_eq!((10u32, 11u8).as_ssz_bytes(), vec![10, 0, 0, 0, 11]);
        assert_eq!((10u8, 11u8, 12u8).as_ssz_bytes(), vec![10, 11, 12]);
    }
}

View File

@ -1,265 +0,0 @@
//! Provides a "legacy" version of SSZ encoding for `Option<T> where T: Encode + Decode`.
//!
//! The SSZ specification changed in 2021 to use a 1-byte union selector, instead of a 4-byte one
//! which was used in the Lighthouse database.
//!
//! Users can use the `four_byte_option_impl` macro to define a module that can be used with the
//! `#[ssz(with = "module")]`.
//!
//! ## Example
//!
//! ```rust
//! use ssz_derive::{Encode, Decode};
//! use ssz::four_byte_option_impl;
//!
//! four_byte_option_impl!(impl_for_u64, u64);
//!
//! #[derive(Encode, Decode)]
//! struct Foo {
//! #[ssz(with = "impl_for_u64")]
//! a: Option<u64>,
//! }
//! ```
use crate::*;
/// Define a module `$mod_name` whose `encode`/`decode` submodules implement the
/// legacy 4-byte-selector encoding of `Option<$type>`, for use with
/// `#[ssz(with = "$mod_name")]`.
///
/// Layout: `None` is `[0, 0, 0, 0]`; `Some(t)` is `[1, 0, 0, 0]` followed by
/// the SSZ encoding of `t`.
#[macro_export]
macro_rules! four_byte_option_impl {
    ($mod_name: ident, $type: ty) => {
        #[allow(dead_code)]
        mod $mod_name {
            use super::*;

            pub mod encode {
                use super::*;
                #[allow(unused_imports)]
                use ssz::*;

                // The encoding is always selector + optional payload, so it is
                // never fixed-length.
                pub fn is_ssz_fixed_len() -> bool {
                    false
                }

                pub fn ssz_fixed_len() -> usize {
                    BYTES_PER_LENGTH_OFFSET
                }

                // Length is the 4-byte selector plus the payload length (if any).
                pub fn ssz_bytes_len(opt: &Option<$type>) -> usize {
                    if let Some(some) = opt {
                        let len = if <$type as Encode>::is_ssz_fixed_len() {
                            <$type as Encode>::ssz_fixed_len()
                        } else {
                            <$type as Encode>::ssz_bytes_len(some)
                        };
                        len + BYTES_PER_LENGTH_OFFSET
                    } else {
                        BYTES_PER_LENGTH_OFFSET
                    }
                }

                pub fn ssz_append(opt: &Option<$type>, buf: &mut Vec<u8>) {
                    match opt {
                        None => buf.extend_from_slice(&legacy::encode_four_byte_union_selector(0)),
                        Some(t) => {
                            buf.extend_from_slice(&legacy::encode_four_byte_union_selector(1));
                            t.ssz_append(buf);
                        }
                    }
                }

                pub fn as_ssz_bytes(opt: &Option<$type>) -> Vec<u8> {
                    let mut buf = vec![];

                    ssz_append(opt, &mut buf);

                    buf
                }
            }

            pub mod decode {
                use super::*;
                #[allow(unused_imports)]
                use ssz::*;

                pub fn is_ssz_fixed_len() -> bool {
                    false
                }

                pub fn ssz_fixed_len() -> usize {
                    BYTES_PER_LENGTH_OFFSET
                }

                // Reads the 4-byte selector, then decodes the payload for
                // selector 1. Selectors > 1 are rejected.
                pub fn from_ssz_bytes(bytes: &[u8]) -> Result<Option<$type>, DecodeError> {
                    if bytes.len() < BYTES_PER_LENGTH_OFFSET {
                        return Err(DecodeError::InvalidByteLength {
                            len: bytes.len(),
                            expected: BYTES_PER_LENGTH_OFFSET,
                        });
                    }

                    let (index_bytes, value_bytes) = bytes.split_at(BYTES_PER_LENGTH_OFFSET);

                    let index = legacy::read_four_byte_union_selector(index_bytes)?;
                    if index == 0 {
                        Ok(None)
                    } else if index == 1 {
                        Ok(Some(<$type as ssz::Decode>::from_ssz_bytes(value_bytes)?))
                    } else {
                        Err(DecodeError::BytesInvalid(format!(
                            "{} is not a valid union index for Option<T>",
                            index
                        )))
                    }
                }
            }
        }
    };
}
/// Encode `selector` as 4 little-endian bytes (the legacy union selector uses
/// the same encoding as an SSZ length offset).
pub fn encode_four_byte_union_selector(selector: usize) -> [u8; BYTES_PER_LENGTH_OFFSET] {
    encode_length(selector)
}
/// Decode a legacy 4-byte union selector from the start of `bytes`, using the
/// same little-endian format as an SSZ length offset.
pub fn read_four_byte_union_selector(bytes: &[u8]) -> Result<usize, DecodeError> {
    read_offset(bytes)
}
#[cfg(test)]
mod test {
    use super::*;
    use crate as ssz;
    use ssz_derive::{Decode, Encode};

    type VecU16 = Vec<u16>;

    four_byte_option_impl!(impl_u16, u16);
    four_byte_option_impl!(impl_vec_u16, VecU16);

    // `Some` is a 4-byte `1` selector followed by the payload; `None` is a
    // 4-byte `0` selector alone.
    #[test]
    fn ssz_encode_option_u16() {
        let item = Some(65535_u16);
        let bytes = vec![1, 0, 0, 0, 255, 255];
        assert_eq!(impl_u16::encode::as_ssz_bytes(&item), bytes);
        assert_eq!(impl_u16::decode::from_ssz_bytes(&bytes).unwrap(), item);

        let item = None;
        let bytes = vec![0, 0, 0, 0];
        assert_eq!(impl_u16::encode::as_ssz_bytes(&item), bytes);
        assert_eq!(impl_u16::decode::from_ssz_bytes(&bytes).unwrap(), None);
    }

    #[test]
    fn ssz_encode_option_vec_u16() {
        let item = Some(vec![0_u16, 1]);
        let bytes = vec![1, 0, 0, 0, 0, 0, 1, 0];
        assert_eq!(impl_vec_u16::encode::as_ssz_bytes(&item), bytes);
        assert_eq!(impl_vec_u16::decode::from_ssz_bytes(&bytes).unwrap(), item);

        let item = None;
        let bytes = vec![0, 0, 0, 0];
        assert_eq!(impl_vec_u16::encode::as_ssz_bytes(&item), bytes);
        assert_eq!(impl_vec_u16::decode::from_ssz_bytes(&bytes).unwrap(), item);
    }

    // Encode then decode each item, also checking `ssz_bytes_len` against the
    // actual encoded length.
    fn round_trip<T: Encode + Decode + std::fmt::Debug + PartialEq>(items: Vec<T>) {
        for item in items {
            let encoded = &item.as_ssz_bytes();
            assert_eq!(item.ssz_bytes_len(), encoded.len());
            assert_eq!(T::from_ssz_bytes(encoded), Ok(item));
        }
    }

    #[derive(Debug, PartialEq, Encode, Decode)]
    struct TwoVariableLenOptions {
        a: u16,
        #[ssz(with = "impl_u16")]
        b: Option<u16>,
        #[ssz(with = "impl_vec_u16")]
        c: Option<Vec<u16>>,
        #[ssz(with = "impl_vec_u16")]
        d: Option<Vec<u16>>,
    }

    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn two_variable_len_options_encoding() {
        let s = TwoVariableLenOptions {
            a: 42,
            b: None,
            c: Some(vec![0]),
            d: None,
        };

        let bytes = vec![
            //  1   2   3   4   5   6   7   8   9   10  11  12  13  14  15  16  17  18  19  20  21
            //      | option<u16>   | offset        | offset        | option<u16    | 1st list
            42, 00, 14, 00, 00, 00, 18, 00, 00, 00, 24, 00, 00, 00, 00, 00, 00, 00, 01, 00, 00, 00,
            //  23  24  25  26  27
            //      | 2nd list
            00, 00, 00, 00, 00, 00,
        ];

        assert_eq!(s.as_ssz_bytes(), bytes);
    }

    #[test]
    fn two_variable_len_options_round_trip() {
        let vec: Vec<TwoVariableLenOptions> = vec![
            TwoVariableLenOptions {
                a: 42,
                b: Some(12),
                c: Some(vec![0]),
                d: Some(vec![1]),
            },
            TwoVariableLenOptions {
                a: 42,
                b: Some(12),
                c: Some(vec![0]),
                d: None,
            },
            TwoVariableLenOptions {
                a: 42,
                b: None,
                c: Some(vec![0]),
                d: None,
            },
            TwoVariableLenOptions {
                a: 42,
                b: None,
                c: None,
                d: None,
            },
        ];

        round_trip(vec);
    }

    #[test]
    fn tuple_u8_u16() {
        let vec: Vec<(u8, u16)> = vec![
            (0, 0),
            (0, 1),
            (1, 0),
            (u8::max_value(), u16::max_value()),
            (0, u16::max_value()),
            (u8::max_value(), 0),
            (42, 12301),
        ];

        round_trip(vec);
    }

    #[test]
    fn tuple_vec_vec() {
        let vec: Vec<(u64, Vec<u8>, Vec<Vec<u16>>)> = vec![
            (0, vec![], vec![vec![]]),
            (99, vec![101], vec![vec![], vec![]]),
            (
                42,
                vec![12, 13, 14],
                vec![vec![99, 98, 97, 96], vec![42, 44, 46, 48, 50]],
            ),
        ];

        round_trip(vec);
    }
}

View File

@ -1,71 +0,0 @@
//! Provides encoding (serialization) and decoding (deserialization) in the SimpleSerialize (SSZ)
//! format designed for use in Ethereum 2.0.
//!
//! Adheres to the Ethereum 2.0 [SSZ
//! specification](https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/ssz/simple-serialize.md)
//! at v0.12.1.
//!
//! ## Example
//!
//! ```rust
//! use ssz_derive::{Encode, Decode};
//! use ssz::{Decode, Encode};
//!
//! #[derive(PartialEq, Debug, Encode, Decode)]
//! struct Foo {
//! a: u64,
//! b: Vec<u16>,
//! }
//!
//! fn ssz_encode_decode_example() {
//! let foo = Foo {
//! a: 42,
//! b: vec![1, 3, 3, 7]
//! };
//!
//! let ssz_bytes: Vec<u8> = foo.as_ssz_bytes();
//!
//! let decoded_foo = Foo::from_ssz_bytes(&ssz_bytes).unwrap();
//!
//! assert_eq!(foo, decoded_foo);
//! }
//!
//! ```
//!
//! See `examples/` for manual implementations of the `Encode` and `Decode` traits.
mod decode;
mod encode;
pub mod legacy;
mod union_selector;
pub use decode::{
impls::decode_list_of_variable_length_items, read_offset, split_union_bytes,
try_from_iter::TryFromIter, Decode, DecodeError, SszDecoder, SszDecoderBuilder,
};
pub use encode::{encode_length, Encode, SszEncoder};
pub use union_selector::UnionSelector;
/// The number of bytes used to represent an offset.
pub const BYTES_PER_LENGTH_OFFSET: usize = 4;
/// The maximum value that can be represented using `BYTES_PER_LENGTH_OFFSET`.
// The shift keeps only `BYTES_PER_LENGTH_OFFSET` bytes' worth of set bits in
// the all-ones pattern; both cfg arms evaluate to 2^32 - 1.
#[cfg(target_pointer_width = "32")]
pub const MAX_LENGTH_VALUE: usize = (std::u32::MAX >> (8 * (4 - BYTES_PER_LENGTH_OFFSET))) as usize;
#[cfg(target_pointer_width = "64")]
pub const MAX_LENGTH_VALUE: usize = (std::u64::MAX >> (8 * (8 - BYTES_PER_LENGTH_OFFSET))) as usize;
/// The number of bytes used to indicate the variant of a union.
pub const BYTES_PER_UNION_SELECTOR: usize = 1;
/// The highest possible union selector value (higher values are reserved for backwards compatible
/// extensions).
pub const MAX_UNION_SELECTOR: u8 = 127;
/// Convenience function to SSZ encode an object supporting ssz::Encode.
///
/// Equivalent to `val.as_ssz_bytes()`.
pub fn ssz_encode<T>(val: &T) -> Vec<u8>
where
    T: Encode,
{
    val.as_ssz_bytes()
}

View File

@ -1,29 +0,0 @@
use crate::*;
/// Provides the one-byte "selector" from the SSZ union specification:
///
/// https://github.com/ethereum/consensus-specs/blob/v1.1.0-beta.3/ssz/simple-serialize.md#union
///
/// Newtype over `u8`; construct via `UnionSelector::new` so the
/// `MAX_UNION_SELECTOR` bound is enforced.
#[derive(Copy, Clone)]
pub struct UnionSelector(u8);
impl From<UnionSelector> for u8 {
    /// Unwrap the selector into its raw byte.
    fn from(selector: UnionSelector) -> u8 {
        selector.0
    }
}
impl PartialEq<u8> for UnionSelector {
    /// A selector equals a `u8` when its wrapped byte matches.
    fn eq(&self, other: &u8) -> bool {
        *other == self.0
    }
}
impl UnionSelector {
    /// Instantiate `self`, returning an error if `selector > MAX_UNION_SELECTOR`.
    pub fn new(selector: u8) -> Result<Self, DecodeError> {
        if selector <= MAX_UNION_SELECTOR {
            Ok(Self(selector))
        } else {
            Err(DecodeError::UnionSelectorInvalid(selector))
        }
    }
}

View File

@ -1,390 +0,0 @@
use ethereum_types::H256;
use ssz::{Decode, DecodeError, Encode};
use ssz_derive::{Decode, Encode};
// Encode/decode round-trip tests for the derived and built-in SSZ impls.
mod round_trip {
    use super::*;
    use std::collections::BTreeMap;
    use std::iter::FromIterator;

    // Encode then decode each item, also checking `ssz_bytes_len` against the
    // actual encoded length.
    fn round_trip<T: Encode + Decode + std::fmt::Debug + PartialEq>(items: Vec<T>) {
        for item in items {
            let encoded = &item.as_ssz_bytes();
            assert_eq!(item.ssz_bytes_len(), encoded.len());
            assert_eq!(T::from_ssz_bytes(encoded), Ok(item));
        }
    }

    #[test]
    fn bool() {
        let items: Vec<bool> = vec![true, false];

        round_trip(items);
    }

    #[test]
    fn option_u16() {
        let items: Vec<Option<u16>> = vec![None, Some(2u16)];

        round_trip(items);
    }

    #[test]
    fn u8_array_4() {
        let items: Vec<[u8; 4]> = vec![[0, 0, 0, 0], [1, 0, 0, 0], [1, 2, 3, 4], [1, 2, 0, 4]];

        round_trip(items);
    }

    #[test]
    fn h256() {
        let items: Vec<H256> = vec![H256::zero(), H256::from([1; 32]), H256::random()];

        round_trip(items);
    }

    #[test]
    fn vec_of_h256() {
        let items: Vec<Vec<H256>> = vec![
            vec![],
            vec![H256::zero(), H256::from([1; 32]), H256::random()],
        ];

        round_trip(items);
    }

    #[test]
    fn option_vec_h256() {
        let items: Vec<Option<Vec<H256>>> = vec![
            None,
            Some(vec![]),
            Some(vec![H256::zero(), H256::from([1; 32]), H256::random()]),
        ];

        round_trip(items);
    }

    #[test]
    fn vec_u16() {
        let items: Vec<Vec<u16>> = vec![
            vec![],
            vec![255],
            vec![0, 1, 2],
            vec![100; 64],
            vec![255, 0, 255],
        ];

        round_trip(items);
    }

    #[test]
    fn vec_of_vec_u16() {
        let items: Vec<Vec<Vec<u16>>> = vec![
            vec![],
            vec![vec![]],
            vec![vec![1, 2, 3]],
            vec![vec![], vec![]],
            vec![vec![], vec![1, 2, 3]],
            vec![vec![1, 2, 3], vec![1, 2, 3]],
            vec![vec![1, 2, 3], vec![], vec![1, 2, 3]],
            vec![vec![], vec![], vec![1, 2, 3]],
            vec![vec![], vec![1], vec![1, 2, 3]],
            vec![vec![], vec![1], vec![1, 2, 3]],
        ];

        round_trip(items);
    }

    // All-fixed-size fields: the container has no offsets.
    #[derive(Debug, PartialEq, Encode, Decode)]
    struct FixedLen {
        a: u16,
        b: u64,
        c: u32,
    }

    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn fixed_len_struct_encoding() {
        let items: Vec<FixedLen> = vec![
            FixedLen { a: 0, b: 0, c: 0 },
            FixedLen { a: 1, b: 1, c: 1 },
            FixedLen { a: 1, b: 0, c: 1 },
        ];

        let expected_encodings = vec![
            //  | u16--| u64----------------------------| u32----------|
            vec![00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00],
            vec![01, 00, 01, 00, 00, 00, 00, 00, 00, 00, 01, 00, 00, 00],
            vec![01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 01, 00, 00, 00],
        ];

        for i in 0..items.len() {
            assert_eq!(
                items[i].as_ssz_bytes(),
                expected_encodings[i],
                "Failed on {}",
                i
            );
        }
    }

    // Trailing bytes after a fixed-length container must be rejected.
    #[test]
    fn fixed_len_excess_bytes() {
        let fixed = FixedLen { a: 1, b: 2, c: 3 };

        let mut bytes = fixed.as_ssz_bytes();
        bytes.append(&mut vec![0]);

        assert_eq!(
            FixedLen::from_ssz_bytes(&bytes),
            Err(DecodeError::InvalidByteLength {
                len: 15,
                expected: 14,
            })
        );
    }

    #[test]
    fn vec_of_fixed_len_struct() {
        let items: Vec<FixedLen> = vec![
            FixedLen { a: 0, b: 0, c: 0 },
            FixedLen { a: 1, b: 1, c: 1 },
            FixedLen { a: 1, b: 0, c: 1 },
        ];

        round_trip(items);
    }

    // One variable-size field: the container carries a single offset.
    #[derive(Debug, PartialEq, Encode, Decode)]
    struct VariableLen {
        a: u16,
        b: Vec<u16>,
        c: u32,
    }

    // An offset pointing inside the fixed-length portion is invalid.
    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn offset_into_fixed_bytes() {
        let bytes = vec![
            //  1   2   3   4   5   6   7   8   9   10  11  12  13  14  15
            //      | offset        | u32           | variable
            01, 00, 09, 00, 00, 00, 01, 00, 00, 00, 00, 00, 01, 00, 02, 00,
        ];

        assert_eq!(
            VariableLen::from_ssz_bytes(&bytes),
            Err(DecodeError::OffsetIntoFixedPortion(9))
        );
    }

    #[test]
    fn variable_len_excess_bytes() {
        let variable = VariableLen {
            a: 1,
            b: vec![2],
            c: 3,
        };

        let mut bytes = variable.as_ssz_bytes();
        bytes.append(&mut vec![0]);

        // The error message triggered is not so helpful, it's caught by a side-effect. Just
        // checking there is _some_ error is fine.
        assert!(VariableLen::from_ssz_bytes(&bytes).is_err());
    }

    // The first offset must point at the first byte after the fixed portion.
    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn first_offset_skips_byte() {
        let bytes = vec![
            //  1   2   3   4   5   6   7   8   9   10  11  12  13  14  15
            //      | offset        | u32           | variable
            01, 00, 11, 00, 00, 00, 01, 00, 00, 00, 00, 00, 01, 00, 02, 00,
        ];

        assert_eq!(
            VariableLen::from_ssz_bytes(&bytes),
            Err(DecodeError::OffsetSkipsVariableBytes(11))
        );
    }

    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn variable_len_struct_encoding() {
        let items: Vec<VariableLen> = vec![
            VariableLen {
                a: 0,
                b: vec![],
                c: 0,
            },
            VariableLen {
                a: 1,
                b: vec![0],
                c: 1,
            },
            VariableLen {
                a: 1,
                b: vec![0, 1, 2],
                c: 1,
            },
        ];

        let expected_encodings = vec![
            //   00..................................09
            //  | u16--| vec offset-----| u32------------| vec payload --------|
            vec![00, 00, 10, 00, 00, 00, 00, 00, 00, 00],
            vec![01, 00, 10, 00, 00, 00, 01, 00, 00, 00, 00, 00],
            vec![
                01, 00, 10, 00, 00, 00, 01, 00, 00, 00, 00, 00, 01, 00, 02, 00,
            ],
        ];

        for i in 0..items.len() {
            assert_eq!(
                items[i].as_ssz_bytes(),
                expected_encodings[i],
                "Failed on {}",
                i
            );
        }
    }

    #[test]
    fn vec_of_variable_len_struct() {
        let items: Vec<VariableLen> = vec![
            VariableLen {
                a: 0,
                b: vec![],
                c: 0,
            },
            VariableLen {
                a: 255,
                b: vec![0, 1, 2, 3],
                c: 99,
            },
            VariableLen {
                a: 255,
                b: vec![0],
                c: 99,
            },
            VariableLen {
                a: 50,
                b: vec![0],
                c: 0,
            },
        ];

        round_trip(items);
    }

    #[derive(Debug, PartialEq, Encode, Decode)]
    struct ThreeVariableLen {
        a: u16,
        b: Vec<u16>,
        c: Vec<u16>,
        d: Vec<u16>,
    }

    #[test]
    fn three_variable_len() {
        let vec: Vec<ThreeVariableLen> = vec![ThreeVariableLen {
            a: 42,
            b: vec![0],
            c: vec![1],
            d: vec![2],
        }];

        round_trip(vec);
    }

    // Offsets must be monotonically non-decreasing.
    #[test]
    #[allow(clippy::zero_prefixed_literal)]
    fn offsets_decreasing() {
        let bytes = vec![
            //  1   2   3   4   5   6   7   8   9   10  11  12  13  14  15
            //      | offset        | offset        | offset        | variable
            01, 00, 14, 00, 00, 00, 15, 00, 00, 00, 14, 00, 00, 00, 00, 00,
        ];

        assert_eq!(
            ThreeVariableLen::from_ssz_bytes(&bytes),
            Err(DecodeError::OffsetsAreDecreasing(14))
        );
    }

    #[test]
    fn tuple_u8_u16() {
        let vec: Vec<(u8, u16)> = vec![
            (0, 0),
            (0, 1),
            (1, 0),
            (u8::max_value(), u16::max_value()),
            (0, u16::max_value()),
            (u8::max_value(), 0),
            (42, 12301),
        ];

        round_trip(vec);
    }

    #[test]
    fn tuple_vec_vec() {
        let vec: Vec<(u64, Vec<u8>, Vec<Vec<u16>>)> = vec![
            (0, vec![], vec![vec![]]),
            (99, vec![101], vec![vec![], vec![]]),
            (
                42,
                vec![12, 13, 14],
                vec![vec![99, 98, 97, 96], vec![42, 44, 46, 48, 50]],
            ),
        ];

        round_trip(vec);
    }

    #[test]
    fn btree_map_fixed() {
        let data = vec![
            BTreeMap::new(),
            BTreeMap::from_iter(vec![(0u8, 0u16), (1, 2), (2, 4), (4, 6)]),
        ];

        round_trip(data);
    }

    #[test]
    fn btree_map_variable_value() {
        let data = vec![
            BTreeMap::new(),
            BTreeMap::from_iter(vec![
                (
                    0u64,
                    ThreeVariableLen {
                        a: 1,
                        b: vec![3, 5, 7],
                        c: vec![],
                        d: vec![0, 0],
                    },
                ),
                (
                    1,
                    ThreeVariableLen {
                        a: 99,
                        b: vec![1],
                        c: vec![2, 3, 4, 5, 6, 7, 8, 9, 10],
                        d: vec![4, 5, 6, 7, 8],
                    },
                ),
                (
                    2,
                    ThreeVariableLen {
                        a: 0,
                        b: vec![],
                        c: vec![],
                        d: vec![],
                    },
                ),
            ]),
        ];

        round_trip(data);
    }
}

View File

@ -1,20 +0,0 @@
[package]
name = "eth2_ssz_derive"
version = "0.3.1"
authors = ["Paul Hauner <paul@sigmaprime.io>"]
edition = "2021"
description = "Procedural derive macros to accompany the eth2_ssz crate."
license = "Apache-2.0"
[lib]
name = "ssz_derive"
proc-macro = true
[dependencies]
syn = "1.0.42"
proc-macro2 = "1.0.23"
quote = "1.0.7"
darling = "0.13.0"
[dev-dependencies]
eth2_ssz = "0.4.1"

View File

@ -1,981 +0,0 @@
//! Provides procedural derive macros for the `Encode` and `Decode` traits of the `eth2_ssz` crate.
//!
//! ## Attributes
//!
//! The following struct/enum attributes are available:
//!
//! - `#[ssz(enum_behaviour = "union")]`: encodes and decodes an `enum` with a one-byte variant selector.
//! - `#[ssz(enum_behaviour = "transparent")]`: allows encoding an `enum` by serializing only the
//! value whilst ignoring outermost the `enum`.
//! - `#[ssz(struct_behaviour = "container")]`: encodes and decodes the `struct` as an SSZ
//! "container".
//! - `#[ssz(struct_behaviour = "transparent")]`: encodes and decodes a `struct` with exactly one
//! non-skipped field as if the outermost `struct` does not exist.
//!
//! The following field attributes are available:
//!
//! - `#[ssz(with = "module")]`: uses the methods in `module` to implement `ssz::Encode` and
//! `ssz::Decode`. This is useful when it's not possible to create an `impl` for that type
//! (e.g. the type is defined in another crate).
//! - `#[ssz(skip_serializing)]`: this field will not be included in the serialized SSZ vector.
//! - `#[ssz(skip_deserializing)]`: this field will not be expected in the serialized
//! SSZ vector and it will be initialized from a `Default` implementation.
//!
//! ## Examples
//!
//! ### Structs
//!
//! ```rust
//! use ssz::{Encode, Decode};
//! use ssz_derive::{Encode, Decode};
//!
//! /// Represented as an SSZ "list" wrapped in an SSZ "container".
//! #[derive(Debug, PartialEq, Encode, Decode)]
//! #[ssz(struct_behaviour = "container")] // "container" is the default behaviour
//! struct TypicalStruct {
//! foo: Vec<u8>
//! }
//!
//! assert_eq!(
//! TypicalStruct { foo: vec![42] }.as_ssz_bytes(),
//! vec![4, 0, 0, 0, 42]
//! );
//!
//! assert_eq!(
//! TypicalStruct::from_ssz_bytes(&[4, 0, 0, 0, 42]).unwrap(),
//! TypicalStruct { foo: vec![42] },
//! );
//!
//! /// Represented as an SSZ "list" *without* an SSZ "container".
//! #[derive(Encode, Decode)]
//! #[ssz(struct_behaviour = "transparent")]
//! struct WrapperStruct {
//! foo: Vec<u8>
//! }
//!
//! assert_eq!(
//! WrapperStruct { foo: vec![42] }.as_ssz_bytes(),
//! vec![42]
//! );
//!
//! /// Represented as an SSZ "list" *without* an SSZ "container". The `bar` byte is ignored.
//! #[derive(Debug, PartialEq, Encode, Decode)]
//! #[ssz(struct_behaviour = "transparent")]
//! struct WrapperStructSkippedField {
//! foo: Vec<u8>,
//! #[ssz(skip_serializing, skip_deserializing)]
//! bar: u8,
//! }
//!
//! assert_eq!(
//! WrapperStructSkippedField { foo: vec![42], bar: 99 }.as_ssz_bytes(),
//! vec![42]
//! );
//! assert_eq!(
//! WrapperStructSkippedField::from_ssz_bytes(&[42]).unwrap(),
//! WrapperStructSkippedField { foo: vec![42], bar: 0 }
//! );
//!
//! /// Represented as an SSZ "list" *without* an SSZ "container".
//! #[derive(Encode, Decode)]
//! #[ssz(struct_behaviour = "transparent")]
//! struct NewType(Vec<u8>);
//!
//! assert_eq!(
//! NewType(vec![42]).as_ssz_bytes(),
//! vec![42]
//! );
//!
//! /// Represented as an SSZ "list" *without* an SSZ "container". The `bar` byte is ignored.
//! #[derive(Debug, PartialEq, Encode, Decode)]
//! #[ssz(struct_behaviour = "transparent")]
//! struct NewTypeSkippedField(Vec<u8>, #[ssz(skip_serializing, skip_deserializing)] u8);
//!
//! assert_eq!(
//! NewTypeSkippedField(vec![42], 99).as_ssz_bytes(),
//! vec![42]
//! );
//! assert_eq!(
//! NewTypeSkippedField::from_ssz_bytes(&[42]).unwrap(),
//! NewTypeSkippedField(vec![42], 0)
//! );
//! ```
//!
//! ### Enums
//!
//! ```rust
//! use ssz::{Encode, Decode};
//! use ssz_derive::{Encode, Decode};
//!
//! /// Represented as an SSZ "union".
//! #[derive(Debug, PartialEq, Encode, Decode)]
//! #[ssz(enum_behaviour = "union")]
//! enum UnionEnum {
//! Foo(u8),
//! Bar(Vec<u8>),
//! }
//!
//! assert_eq!(
//! UnionEnum::Foo(42).as_ssz_bytes(),
//! vec![0, 42]
//! );
//! assert_eq!(
//! UnionEnum::from_ssz_bytes(&[1, 42, 42]).unwrap(),
//! UnionEnum::Bar(vec![42, 42]),
//! );
//!
//! /// Represented as only the value in the enum variant.
//! #[derive(Debug, PartialEq, Encode)]
//! #[ssz(enum_behaviour = "transparent")]
//! enum TransparentEnum {
//! Foo(u8),
//! Bar(Vec<u8>),
//! }
//!
//! assert_eq!(
//! TransparentEnum::Foo(42).as_ssz_bytes(),
//! vec![42]
//! );
//! assert_eq!(
//! TransparentEnum::Bar(vec![42, 42]).as_ssz_bytes(),
//! vec![42, 42]
//! );
//! ```
use darling::{FromDeriveInput, FromMeta};
use proc_macro::TokenStream;
use quote::quote;
use std::convert::TryInto;
use syn::{parse_macro_input, DataEnum, DataStruct, DeriveInput, Ident};
/// The highest possible union selector value (higher values are reserved for backwards compatible
/// extensions).
const MAX_UNION_SELECTOR: u8 = 127;

// Valid values of the `enum_behaviour` attribute.
const ENUM_TRANSPARENT: &str = "transparent";
const ENUM_UNION: &str = "union";

// Compile-time panic message for enums that omit `enum_behaviour`.
const NO_ENUM_BEHAVIOUR_ERROR: &str = "enums require an \"enum_behaviour\" attribute with \
    a \"transparent\" or \"union\" value, e.g., #[ssz(enum_behaviour = \"transparent\")]";
/// Item-level configuration parsed from the `#[ssz(...)]` attribute on the
/// struct or enum being derived.
#[derive(Debug, FromDeriveInput)]
#[darling(attributes(ssz))]
struct StructOpts {
    // `enum_behaviour = "union" | "transparent"`; only valid on enums.
    #[darling(default)]
    enum_behaviour: Option<String>,
    // `struct_behaviour = "container" | "transparent"`; only valid on structs.
    #[darling(default)]
    struct_behaviour: Option<String>,
}
/// Field-level configuration.
#[derive(Debug, Default, FromMeta)]
struct FieldOpts {
    // `#[ssz(with = "module")]`: delegate encoding/decoding to `module`.
    #[darling(default)]
    with: Option<Ident>,
    // `#[ssz(skip_serializing)]`: omit this field from the encoding.
    #[darling(default)]
    skip_serializing: bool,
    // `#[ssz(skip_deserializing)]`: fill this field from `Default` on decode.
    #[darling(default)]
    skip_deserializing: bool,
}
/// The derive strategy selected from the item kind and its attributes.
enum Procedure<'a> {
    Struct {
        data: &'a syn::DataStruct,
        behaviour: StructBehaviour,
    },
    Enum {
        data: &'a syn::DataEnum,
        behaviour: EnumBehaviour,
    },
}
/// How a struct is encoded: as an SSZ "container", or "transparently" as its
/// single non-skipped field.
enum StructBehaviour {
    Container,
    Transparent,
}
/// How an enum is encoded: as an SSZ "union" (one-byte selector), or
/// "transparently" as the contained value alone.
enum EnumBehaviour {
    Union,
    Transparent,
}
impl<'a> Procedure<'a> {
    /// Parse the derive input, choosing the procedure from the item kind
    /// (struct vs enum) and its `#[ssz(...)]` behaviour attribute.
    ///
    /// Panics (surfacing as a compile error in the macro context) on any
    /// invalid attribute combination.
    fn read(item: &'a DeriveInput) -> Self {
        let opts = StructOpts::from_derive_input(item).unwrap();

        match &item.data {
            syn::Data::Struct(data) => {
                if opts.enum_behaviour.is_some() {
                    panic!("cannot use \"enum_behaviour\" for a struct");
                }

                match opts.struct_behaviour.as_deref() {
                    // "container" is the default struct behaviour.
                    Some("container") | None => Procedure::Struct {
                        data,
                        behaviour: StructBehaviour::Container,
                    },
                    Some("transparent") => Procedure::Struct {
                        data,
                        behaviour: StructBehaviour::Transparent,
                    },
                    Some(other) => panic!(
                        "{} is not a valid struct behaviour, use \"container\" or \"transparent\"",
                        other
                    ),
                }
            }
            syn::Data::Enum(data) => {
                if opts.struct_behaviour.is_some() {
                    panic!("cannot use \"struct_behaviour\" for an enum");
                }

                // Use the declared constants rather than repeating the literals.
                match opts.enum_behaviour.as_deref() {
                    Some(ENUM_UNION) => Procedure::Enum {
                        data,
                        behaviour: EnumBehaviour::Union,
                    },
                    Some(ENUM_TRANSPARENT) => Procedure::Enum {
                        data,
                        behaviour: EnumBehaviour::Transparent,
                    },
                    // Fix: the valid enum behaviours are "union" and
                    // "transparent" — the old message wrongly suggested
                    // "container", which is a *struct* behaviour.
                    Some(other) => panic!(
                        "{} is not a valid enum behaviour, use \"union\" or \"transparent\"",
                        other
                    ),
                    None => panic!("{}", NO_ENUM_BEHAVIOUR_ERROR),
                }
            }
            _ => panic!("ssz_derive only supports structs and enums"),
        }
    }
}
/// Collect `(type, optional ident, parsed field options)` for every field of
/// `struct_data`.
///
/// Panics if any field carries more than one field-level `#[ssz(...)]`
/// attribute.
fn parse_ssz_fields(
    struct_data: &syn::DataStruct,
) -> Vec<(&syn::Type, Option<&syn::Ident>, FieldOpts)> {
    let mut fields = vec![];

    for field in struct_data.fields.iter() {
        let ty = &field.ty;
        let ident = field.ident.as_ref();

        // Iterate the field's `#[ssz(...)]` attributes lazily; at most one is
        // permitted.
        let mut ssz_attrs = field
            .attrs
            .iter()
            .filter(|attr| attr.path.get_ident().map_or(false, |ident| *ident == "ssz"));

        let first_attr = ssz_attrs.next();
        if ssz_attrs.next().is_some() {
            panic!("more than one field-level \"ssz\" attribute provided")
        }

        let field_opts = first_attr
            .map(|attr| {
                let meta = attr.parse_meta().unwrap();
                FieldOpts::from_meta(&meta).unwrap()
            })
            .unwrap_or_default();

        fields.push((ty, ident, field_opts));
    }

    fields
}
/// Implements `ssz::Encode` for some `struct` or `enum`.
#[proc_macro_derive(Encode, attributes(ssz))]
pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);
    let procedure = Procedure::read(&item);

    // Dispatch to the generator matching the item kind and its declared
    // behaviour attribute.
    match procedure {
        Procedure::Struct { data, behaviour } => match behaviour {
            StructBehaviour::Transparent => ssz_encode_derive_struct_transparent(&item, data),
            StructBehaviour::Container => ssz_encode_derive_struct(&item, data),
        },
        Procedure::Enum { data, behaviour } => match behaviour {
            EnumBehaviour::Transparent => ssz_encode_derive_enum_transparent(&item, data),
            EnumBehaviour::Union => ssz_encode_derive_enum_union(&item, data),
        },
    }
}
/// Derive `ssz::Encode` for a struct.
///
/// Fields are encoded in the order they are defined.
///
/// ## Field attributes
///
/// - `#[ssz(skip_serializing)]`: the field will not be serialized.
fn ssz_encode_derive_struct(derive_input: &DeriveInput, struct_data: &DataStruct) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();

    // Per-field token fragments, collected in declaration order and spliced
    // into the generated impl below.
    let field_is_ssz_fixed_len = &mut vec![];
    let field_fixed_len = &mut vec![];
    let field_ssz_bytes_len = &mut vec![];
    let field_encoder_append = &mut vec![];

    for (ty, ident, field_opts) in parse_ssz_fields(struct_data) {
        if field_opts.skip_serializing {
            continue;
        }

        let ident = match ident {
            Some(ref ident) => ident,
            _ => panic!(
                "#[ssz(struct_behaviour = \"container\")] only supports named struct fields."
            ),
        };

        if let Some(module) = field_opts.with {
            // `#[ssz(with = "module")]`: call the module's free functions
            // instead of the type's own `Encode` impl.
            let module = quote! { #module::encode };
            field_is_ssz_fixed_len.push(quote! { #module::is_ssz_fixed_len() });
            field_fixed_len.push(quote! { #module::ssz_fixed_len() });
            field_ssz_bytes_len.push(quote! { #module::ssz_bytes_len(&self.#ident) });
            field_encoder_append.push(quote! {
                encoder.append_parameterized(
                    #module::is_ssz_fixed_len(),
                    |buf| #module::ssz_append(&self.#ident, buf)
                )
            });
        } else {
            field_is_ssz_fixed_len.push(quote! { <#ty as ssz::Encode>::is_ssz_fixed_len() });
            field_fixed_len.push(quote! { <#ty as ssz::Encode>::ssz_fixed_len() });
            field_ssz_bytes_len.push(quote! { self.#ident.ssz_bytes_len() });
            field_encoder_append.push(quote! { encoder.append(&self.#ident) });
        }
    }

    let output = quote! {
        impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
            // The container is fixed-length iff every field is.
            fn is_ssz_fixed_len() -> bool {
                #(
                    #field_is_ssz_fixed_len &&
                )*
                    true
            }

            fn ssz_fixed_len() -> usize {
                if <Self as ssz::Encode>::is_ssz_fixed_len() {
                    let mut len: usize = 0;
                    #(
                        len = len
                            .checked_add(#field_fixed_len)
                            .expect("encode ssz_fixed_len length overflow");
                    )*
                    len
                } else {
                    ssz::BYTES_PER_LENGTH_OFFSET
                }
            }

            fn ssz_bytes_len(&self) -> usize {
                if <Self as ssz::Encode>::is_ssz_fixed_len() {
                    <Self as ssz::Encode>::ssz_fixed_len()
                } else {
                    let mut len: usize = 0;
                    #(
                        if #field_is_ssz_fixed_len {
                            len = len
                                .checked_add(#field_fixed_len)
                                .expect("encode ssz_bytes_len length overflow");
                        } else {
                            // Variable fields contribute an offset plus payload.
                            len = len
                                .checked_add(ssz::BYTES_PER_LENGTH_OFFSET)
                                .expect("encode ssz_bytes_len length overflow for offset");
                            len = len
                                .checked_add(#field_ssz_bytes_len)
                                .expect("encode ssz_bytes_len length overflow for bytes");
                        }
                    )*

                    len
                }
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                let mut offset: usize = 0;
                #(
                    offset = offset
                        .checked_add(#field_fixed_len)
                        .expect("encode ssz_append offset overflow");
                )*

                let mut encoder = ssz::SszEncoder::container(buf, offset);

                #(
                    #field_encoder_append;
                )*

                encoder.finalize();
            }
        }
    };
    output.into()
}
/// Derive `ssz::Encode` "transparently" for a struct which has exactly one non-skipped field.
///
/// The single field is encoded directly, making the outermost `struct` transparent.
///
/// ## Field attributes
///
/// - `#[ssz(skip_serializing)]`: the field will not be serialized.
fn ssz_encode_derive_struct_transparent(
    derive_input: &DeriveInput,
    struct_data: &DataStruct,
) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();
    let ssz_fields = parse_ssz_fields(struct_data);
    // NOTE(review): this filters on `skip_deserializing` even though this is the *encode*
    // derive — presumably callers set both skip attributes together; confirm intended.
    let num_fields = ssz_fields
        .iter()
        .filter(|(_, _, field_opts)| !field_opts.skip_deserializing)
        .count();

    // Transparent encoding is only well-defined for exactly one encoded field.
    if num_fields != 1 {
        panic!(
            "A \"transparent\" struct must have exactly one non-skipped field ({} fields found)",
            num_fields
        );
    }

    let (ty, ident, _field_opts) = ssz_fields
        .iter()
        .find(|(_, _, field_opts)| !field_opts.skip_deserializing)
        .expect("\"transparent\" struct must have at least one non-skipped field");

    let output = if let Some(field_name) = ident {
        // Named field (`struct Foo { bar: T }`): delegate every method to `self.bar`.
        quote! {
            impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
                fn is_ssz_fixed_len() -> bool {
                    <#ty as ssz::Encode>::is_ssz_fixed_len()
                }

                fn ssz_fixed_len() -> usize {
                    <#ty as ssz::Encode>::ssz_fixed_len()
                }

                fn ssz_bytes_len(&self) -> usize {
                    self.#field_name.ssz_bytes_len()
                }

                fn ssz_append(&self, buf: &mut Vec<u8>) {
                    self.#field_name.ssz_append(buf)
                }
            }
        }
    } else {
        // Tuple field (`struct Foo(T, ...)`): delegate to `self.0`.
        // NOTE(review): this hard-codes index 0, so a tuple struct whose *first* field is
        // skipped would encode the wrong field — TODO confirm callers never do that.
        quote! {
            impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
                fn is_ssz_fixed_len() -> bool {
                    <#ty as ssz::Encode>::is_ssz_fixed_len()
                }

                fn ssz_fixed_len() -> usize {
                    <#ty as ssz::Encode>::ssz_fixed_len()
                }

                fn ssz_bytes_len(&self) -> usize {
                    self.0.ssz_bytes_len()
                }

                fn ssz_append(&self, buf: &mut Vec<u8>) {
                    self.0.ssz_append(buf)
                }
            }
        }
    };

    output.into()
}
/// Derive `ssz::Encode` for an enum in the "transparent" method.
///
/// The "transparent" method is distinct from the "union" method specified in the SSZ specification.
/// When using "transparent", the enum will be ignored and the contained field will be serialized as
/// if the enum does not exist. Since an union variant "selector" is not serialized, it is not
/// possible to reliably decode an enum that is serialized transparently.
///
/// ## Limitations
///
/// Only supports:
/// - Enums with a single field per variant, where
/// - All fields are variably sized from an SSZ-perspective (not fixed size).
///
/// ## Panics
///
/// Will panic at compile-time if the single field requirement isn't met, but will panic *at run
/// time* if the variable-size requirement isn't met.
fn ssz_encode_derive_enum_transparent(
    derive_input: &DeriveInput,
    enum_data: &DataEnum,
) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();

    // Per variant: a match pattern binding the inner value as `inner`, plus an expression
    // asserting (at runtime, inside the generated code) that the variant type is variably sized.
    let (patterns, assert_exprs): (Vec<_>, Vec<_>) = enum_data
        .variants
        .iter()
        .map(|variant| {
            let variant_name = &variant.ident;

            if variant.fields.len() != 1 {
                panic!("ssz::Encode can only be derived for enums with 1 field per variant");
            }

            let pattern = quote! {
                #name::#variant_name(ref inner)
            };

            let ty = &(&variant.fields).into_iter().next().unwrap().ty;
            let type_assert = quote! {
                !<#ty as ssz::Encode>::is_ssz_fixed_len()
            };
            (pattern, type_assert)
        })
        .unzip();

    let output = quote! {
        impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
            fn is_ssz_fixed_len() -> bool {
                // Transparent encoding is only sound if every variant is variably sized;
                // this cannot be checked at derive time, hence the runtime assertion.
                assert!(
                    #(
                        #assert_exprs &&
                    )* true,
                    "not all enum variants are variably-sized"
                );
                false
            }

            fn ssz_bytes_len(&self) -> usize {
                match self {
                    #(
                        #patterns => inner.ssz_bytes_len(),
                    )*
                }
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                match self {
                    #(
                        #patterns => inner.ssz_append(buf),
                    )*
                }
            }
        }
    };
    output.into()
}
/// Derive `ssz::Encode` for an `enum` following the "union" SSZ spec.
///
/// The union selector will be determined based upon the order in which the enum variants are
/// defined. E.g., the top-most variant in the enum will have a selector of `0`, the variant
/// beneath it will have a selector of `1` and so on.
///
/// # Limitations
///
/// Only supports enums where each variant has a single field.
fn ssz_encode_derive_enum_union(derive_input: &DeriveInput, enum_data: &DataEnum) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();

    // One match pattern per variant, binding the single inner value as `inner`.
    let patterns: Vec<_> = enum_data
        .variants
        .iter()
        .map(|variant| {
            let variant_name = &variant.ident;

            if variant.fields.len() != 1 {
                panic!("ssz::Encode can only be derived for enums with 1 field per variant");
            }

            let pattern = quote! {
                #name::#variant_name(ref inner)
            };
            pattern
        })
        .collect();

    // Selectors are assigned by declaration order: 0, 1, 2, ...
    let union_selectors = compute_union_selectors(patterns.len());

    let output = quote! {
        impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
            fn is_ssz_fixed_len() -> bool {
                // A union is always variable-length: a 1-byte selector precedes the body.
                false
            }

            fn ssz_bytes_len(&self) -> usize {
                match self {
                    #(
                        #patterns => inner
                            .ssz_bytes_len()
                            .checked_add(1)
                            .expect("encoded length must be less than usize::max_value"),
                    )*
                }
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                match self {
                    #(
                        #patterns => {
                            let union_selector: u8 = #union_selectors;
                            debug_assert!(union_selector <= ssz::MAX_UNION_SELECTOR);
                            buf.push(union_selector);
                            inner.ssz_append(buf)
                        },
                    )*
                }
            }
        }
    };
    output.into()
}
/// Derive `ssz::Decode` for a struct or enum.
#[proc_macro_derive(Decode, attributes(ssz))]
pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);

    // Dispatch on the parsed item kind and its requested `#[ssz(...)]` behaviour.
    match Procedure::read(&item) {
        Procedure::Struct {
            data,
            behaviour: StructBehaviour::Transparent,
        } => ssz_decode_derive_struct_transparent(&item, data),
        Procedure::Struct {
            data,
            behaviour: StructBehaviour::Container,
        } => ssz_decode_derive_struct(&item, data),
        Procedure::Enum {
            data,
            behaviour: EnumBehaviour::Union,
        } => ssz_decode_derive_enum_union(&item, data),
        // Transparent enums cannot be decoded: without a selector byte the variant is ambiguous.
        Procedure::Enum {
            behaviour: EnumBehaviour::Transparent,
            ..
        } => panic!(
            "Decode cannot be derived for enum_behaviour \"{}\", only \"{}\" is valid.",
            ENUM_TRANSPARENT, ENUM_UNION
        ),
    }
}
/// Implements `ssz::Decode` for some `struct`.
///
/// Fields are decoded in the order they are defined.
///
/// ## Field attributes
///
/// - `#[ssz(skip_deserializing)]`: during de-serialization the field will be instantiated from a
/// `Default` implementation. The decoder will assume that the field was not serialized at all
/// (e.g., if it has been serialized, an error will be raised instead of `Default` overriding it).
fn ssz_decode_derive_struct(item: &DeriveInput, struct_data: &DataStruct) -> TokenStream {
    let name = &item.ident;
    let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();

    // Token fragments accumulated per-field, interpolated into the generated impl below.
    let mut register_types = vec![];
    let mut field_names = vec![];
    let mut fixed_decodes = vec![];
    let mut decodes = vec![];
    let mut is_fixed_lens = vec![];
    let mut fixed_lens = vec![];

    for (ty, ident, field_opts) in parse_ssz_fields(struct_data) {
        let ident = match ident {
            Some(ref ident) => ident,
            _ => panic!(
                "#[ssz(struct_behaviour = \"container\")] only supports named struct fields."
            ),
        };

        field_names.push(quote! {
            #ident
        });

        // Field should not be deserialized; use a `Default` impl to instantiate.
        if field_opts.skip_deserializing {
            decodes.push(quote! {
                let #ident = <_>::default();
            });

            fixed_decodes.push(quote! {
                let #ident = <_>::default();
            });

            continue;
        }

        let is_ssz_fixed_len;
        let ssz_fixed_len;
        let from_ssz_bytes;

        // `#[ssz(with = "module")]` routes decoding through `module::decode::*` helpers
        // instead of the type's own `ssz::Decode` impl.
        if let Some(module) = field_opts.with {
            let module = quote! { #module::decode };

            is_ssz_fixed_len = quote! { #module::is_ssz_fixed_len() };
            ssz_fixed_len = quote! { #module::ssz_fixed_len() };
            from_ssz_bytes = quote! { #module::from_ssz_bytes(slice) };

            register_types.push(quote! {
                builder.register_type_parameterized(#is_ssz_fixed_len, #ssz_fixed_len)?;
            });
            decodes.push(quote! {
                let #ident = decoder.decode_next_with(|slice| #module::from_ssz_bytes(slice))?;
            });
        } else {
            is_ssz_fixed_len = quote! { <#ty as ssz::Decode>::is_ssz_fixed_len() };
            ssz_fixed_len = quote! { <#ty as ssz::Decode>::ssz_fixed_len() };
            from_ssz_bytes = quote! { <#ty as ssz::Decode>::from_ssz_bytes(slice) };

            register_types.push(quote! {
                builder.register_type::<#ty>()?;
            });
            decodes.push(quote! {
                let #ident = decoder.decode_next()?;
            });
        }

        // Fast path used when the whole struct is fixed-length: each field is decoded from a
        // directly-computed `start..end` window, avoiding the offset-based decoder builder.
        fixed_decodes.push(quote! {
            let #ident = {
                start = end;
                end = end
                    .checked_add(#ssz_fixed_len)
                    .ok_or_else(|| ssz::DecodeError::OutOfBoundsByte {
                        i: usize::max_value()
                    })?;
                let slice = bytes.get(start..end)
                    .ok_or_else(|| ssz::DecodeError::InvalidByteLength {
                        len: bytes.len(),
                        expected: end
                    })?;
                #from_ssz_bytes?
            };
        });
        is_fixed_lens.push(is_ssz_fixed_len);
        fixed_lens.push(ssz_fixed_len);
    }

    let output = quote! {
        impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
            fn is_ssz_fixed_len() -> bool {
                // Fixed-length iff every non-skipped field is fixed-length.
                #(
                    #is_fixed_lens &&
                )*
                true
            }

            fn ssz_fixed_len() -> usize {
                if <Self as ssz::Decode>::is_ssz_fixed_len() {
                    let mut len: usize = 0;
                    #(
                        len = len
                            .checked_add(#fixed_lens)
                            .expect("decode ssz_fixed_len overflow");
                    )*
                    len
                } else {
                    ssz::BYTES_PER_LENGTH_OFFSET
                }
            }

            fn from_ssz_bytes(bytes: &[u8]) -> std::result::Result<Self, ssz::DecodeError> {
                if <Self as ssz::Decode>::is_ssz_fixed_len() {
                    if bytes.len() != <Self as ssz::Decode>::ssz_fixed_len() {
                        return Err(ssz::DecodeError::InvalidByteLength {
                            len: bytes.len(),
                            expected: <Self as ssz::Decode>::ssz_fixed_len(),
                        });
                    }

                    let mut start: usize = 0;
                    let mut end = start;

                    #(
                        #fixed_decodes
                    )*

                    Ok(Self {
                        #(
                            #field_names,
                        )*
                    })
                } else {
                    let mut builder = ssz::SszDecoderBuilder::new(bytes);

                    #(
                        #register_types
                    )*

                    let mut decoder = builder.build()?;

                    #(
                        #decodes
                    )*

                    Ok(Self {
                        #(
                            #field_names,
                        )*
                    })
                }
            }
        }
    };
    output.into()
}
/// Implements `ssz::Decode` "transparently" for a `struct` with exactly one non-skipped field.
///
/// The bytes will be decoded as if they are the inner field, without the outermost struct. The
/// outermost struct will then be applied artificially.
///
/// ## Field attributes
///
/// - `#[ssz(skip_deserializing)]`: during de-serialization the field will be instantiated from a
/// `Default` implementation. The decoder will assume that the field was not serialized at all
/// (e.g., if it has been serialized, an error will be raised instead of `Default` overriding it).
fn ssz_decode_derive_struct_transparent(
    item: &DeriveInput,
    struct_data: &DataStruct,
) -> TokenStream {
    let name = &item.ident;
    let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();
    let ssz_fields = parse_ssz_fields(struct_data);
    let num_fields = ssz_fields
        .iter()
        .filter(|(_, _, field_opts)| !field_opts.skip_deserializing)
        .count();

    // Transparent decoding is only well-defined for exactly one decoded field.
    if num_fields != 1 {
        panic!(
            "A \"transparent\" struct must have exactly one non-skipped field ({} fields found)",
            num_fields
        );
    }

    // Build one initializer per field: skipped fields are `Default`-constructed, the single
    // non-skipped field decodes from `bytes` and determines the delegated SSZ lengths.
    let mut fields = vec![];
    let mut wrapped_type = None;

    for (i, (ty, ident, field_opts)) in ssz_fields.into_iter().enumerate() {
        if let Some(name) = ident {
            // Named field.
            if field_opts.skip_deserializing {
                fields.push(quote! {
                    #name: <_>::default(),
                });
            } else {
                fields.push(quote! {
                    #name: <_>::from_ssz_bytes(bytes)?,
                });
                wrapped_type = Some(ty);
            }
        } else {
            // Tuple-struct field, addressed by positional index (`Self { 0: ..., 1: ... }`).
            let index = syn::Index::from(i);
            if field_opts.skip_deserializing {
                fields.push(quote! {
                    #index:<_>::default(),
                });
            } else {
                fields.push(quote! {
                    #index:<_>::from_ssz_bytes(bytes)?,
                });
                wrapped_type = Some(ty);
            }
        }
    }

    // Guaranteed `Some` by the `num_fields == 1` check above.
    let ty = wrapped_type.unwrap();

    let output = quote! {
        impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
            fn is_ssz_fixed_len() -> bool {
                <#ty as ssz::Decode>::is_ssz_fixed_len()
            }

            fn ssz_fixed_len() -> usize {
                <#ty as ssz::Decode>::ssz_fixed_len()
            }

            fn from_ssz_bytes(bytes: &[u8]) -> std::result::Result<Self, ssz::DecodeError> {
                Ok(Self {
                    #(
                        #fields
                    )*
                })
            }
        }
    };
    output.into()
}
/// Derive `ssz::Decode` for an `enum` following the "union" SSZ spec.
///
/// The variant selector is read from the first byte and matched against selectors assigned in
/// declaration order (`0`, `1`, ...), mirroring `ssz_encode_derive_enum_union`.
///
/// # Limitations
///
/// Only supports enums where each variant has a single field.
fn ssz_decode_derive_enum_union(derive_input: &DeriveInput, enum_data: &DataEnum) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();

    // Per variant: the constructor path (e.g. `MyEnum::A`) and the inner field's type.
    let (constructors, var_types): (Vec<_>, Vec<_>) = enum_data
        .variants
        .iter()
        .map(|variant| {
            let variant_name = &variant.ident;

            if variant.fields.len() != 1 {
                // Bug fix: this message previously said `ssz::Encode` in the Decode derive.
                panic!("ssz::Decode can only be derived for enums with 1 field per variant");
            }

            let constructor = quote! {
                #name::#variant_name
            };

            // Idiomatic `iter()` instead of `(&variant.fields).into_iter()`.
            let ty = &variant.fields.iter().next().unwrap().ty;
            (constructor, ty)
        })
        .unzip();

    let union_selectors = compute_union_selectors(constructors.len());

    let output = quote! {
        impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
            fn is_ssz_fixed_len() -> bool {
                false
            }

            fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> {
                // Sanity check to ensure the definition here does not drift from the one defined in
                // `ssz`.
                debug_assert_eq!(#MAX_UNION_SELECTOR, ssz::MAX_UNION_SELECTOR);

                let (selector, body) = ssz::split_union_bytes(bytes)?;

                match selector.into() {
                    #(
                        #union_selectors => {
                            <#var_types as ssz::Decode>::from_ssz_bytes(body).map(#constructors)
                        },
                    )*
                    other => Err(ssz::DecodeError::UnionSelectorInvalid(other))
                }
            }
        }
    };
    output.into()
}
/// Assign union selectors `0..num_variants` in declaration order, panicking if the enum is
/// empty or its largest selector exceeds the SSZ union selector limit.
fn compute_union_selectors(num_variants: usize) -> Vec<u8> {
    // One selector per variant, starting at zero.
    let selectors: Vec<u8> = (0..num_variants)
        .map(|variant_index| {
            variant_index
                .try_into()
                .expect("union selector exceeds u8::max_value, union has too many variants")
        })
        .collect();

    // The largest assigned selector is the last one; an empty union is rejected outright.
    let max_assigned = *selectors
        .last()
        .expect("0-variant union is not permitted");

    assert!(
        max_assigned <= MAX_UNION_SELECTOR,
        "union selector {} exceeds limit of {}, enum has too many variants",
        max_assigned,
        MAX_UNION_SELECTOR
    );

    selectors
}

View File

@ -1,215 +0,0 @@
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
use std::fmt::Debug;
use std::marker::PhantomData;
/// Assert that SSZ-encoding `item` produces exactly `bytes`.
fn assert_encode<T: Encode>(item: &T, bytes: &[u8]) {
    let encoded = item.as_ssz_bytes();
    assert_eq!(encoded, bytes);
}
/// Assert a full round-trip: `item` encodes to `bytes` and `bytes` decodes back to `item`.
fn assert_encode_decode<T: Encode + Decode + PartialEq + Debug>(item: &T, bytes: &[u8]) {
    assert_encode(item, bytes);
    let decoded = T::from_ssz_bytes(bytes).unwrap();
    assert_eq!(decoded, *item);
}
/// Two-variant union where both variants hold fixed-length SSZ types.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(enum_behaviour = "union")]
enum TwoFixedUnion {
    U8(u8),
    U16(u16),
}

/// Container wrapping a union; unions are always variable-length, so the container
/// serializes a 4-byte offset before the union body.
#[derive(PartialEq, Debug, Encode, Decode)]
struct TwoFixedUnionStruct {
    a: TwoFixedUnion,
}

#[test]
fn two_fixed_union() {
    let eight = TwoFixedUnion::U8(1);
    let sixteen = TwoFixedUnion::U16(1);

    // Byte 0 is the union selector; the remainder is the little-endian variant body.
    assert_encode_decode(&eight, &[0, 1]);
    assert_encode_decode(&sixteen, &[1, 1, 0]);

    // Leading `[4, 0, 0, 0]` is the offset to the variable-length union payload.
    assert_encode_decode(&TwoFixedUnionStruct { a: eight }, &[4, 0, 0, 0, 0, 1]);
    assert_encode_decode(&TwoFixedUnionStruct { a: sixteen }, &[4, 0, 0, 0, 1, 1, 0]);
}
/// Variable-length struct: fixed `u8` followed by a variable `Vec<u8>`.
#[derive(PartialEq, Debug, Encode, Decode)]
struct VariableA {
    a: u8,
    b: Vec<u8>,
}

/// Mirror of `VariableA` with the variable-length field first.
#[derive(PartialEq, Debug, Encode, Decode)]
struct VariableB {
    a: Vec<u8>,
    b: u8,
}

// `transparent` encodes the inner value with no selector byte, so this enum is
// intentionally `Encode`-only: it cannot be reliably decoded.
#[derive(PartialEq, Debug, Encode)]
#[ssz(enum_behaviour = "transparent")]
enum TwoVariableTrans {
    A(VariableA),
    B(VariableB),
}

#[derive(PartialEq, Debug, Encode)]
struct TwoVariableTransStruct {
    a: TwoVariableTrans,
}

/// Same variants as `TwoVariableTrans`, but encoded with a 1-byte union selector.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(enum_behaviour = "union")]
enum TwoVariableUnion {
    A(VariableA),
    B(VariableB),
}

#[derive(PartialEq, Debug, Encode, Decode)]
struct TwoVariableUnionStruct {
    a: TwoVariableUnion,
}
// Transparent output has no selector byte: the inner struct's encoding appears verbatim.
#[test]
fn two_variable_trans() {
    let trans_a = TwoVariableTrans::A(VariableA {
        a: 1,
        b: vec![2, 3],
    });
    let trans_b = TwoVariableTrans::B(VariableB {
        a: vec![1, 2],
        b: 3,
    });

    assert_encode(&trans_a, &[1, 5, 0, 0, 0, 2, 3]);
    assert_encode(&trans_b, &[5, 0, 0, 0, 3, 1, 2]);

    assert_encode(
        &TwoVariableTransStruct { a: trans_a },
        &[4, 0, 0, 0, 1, 5, 0, 0, 0, 2, 3],
    );
    assert_encode(
        &TwoVariableTransStruct { a: trans_b },
        &[4, 0, 0, 0, 5, 0, 0, 0, 3, 1, 2],
    );
}
// Union output prefixes a 1-byte selector (0 for `A`, 1 for `B`) before the inner encoding.
#[test]
fn two_variable_union() {
    let union_a = TwoVariableUnion::A(VariableA {
        a: 1,
        b: vec![2, 3],
    });
    let union_b = TwoVariableUnion::B(VariableB {
        a: vec![1, 2],
        b: 3,
    });

    assert_encode_decode(&union_a, &[0, 1, 5, 0, 0, 0, 2, 3]);
    assert_encode_decode(&union_b, &[1, 5, 0, 0, 0, 3, 1, 2]);

    assert_encode_decode(
        &TwoVariableUnionStruct { a: union_a },
        &[4, 0, 0, 0, 0, 1, 5, 0, 0, 0, 2, 3],
    );
    assert_encode_decode(
        &TwoVariableUnionStruct { a: union_b },
        &[4, 0, 0, 0, 1, 5, 0, 0, 0, 3, 1, 2],
    );
}
/// Union whose variants contain the same type; only the selector byte distinguishes them.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(enum_behaviour = "union")]
enum TwoVecUnion {
    A(Vec<u8>),
    B(Vec<u8>),
}

#[test]
fn two_vec_union() {
    // First byte is the union selector; the rest is the (possibly empty) vec body.
    assert_encode_decode(&TwoVecUnion::A(vec![]), &[0]);
    assert_encode_decode(&TwoVecUnion::B(vec![]), &[1]);

    assert_encode_decode(&TwoVecUnion::A(vec![0]), &[0, 0]);
    assert_encode_decode(&TwoVecUnion::B(vec![0]), &[1, 0]);

    assert_encode_decode(&TwoVecUnion::A(vec![0, 1]), &[0, 0, 1]);
    assert_encode_decode(&TwoVecUnion::B(vec![0, 1]), &[1, 0, 1]);
}
/// Transparent struct with a single named field: encodes exactly like `Vec<u8>`.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(struct_behaviour = "transparent")]
struct TransparentStruct {
    inner: Vec<u8>,
}

impl TransparentStruct {
    fn new(inner: u8) -> Self {
        Self { inner: vec![inner] }
    }
}

#[test]
fn transparent_struct() {
    // The wrapper's encoding is byte-identical to its inner vec's encoding.
    assert_encode_decode(&TransparentStruct::new(42), &vec![42_u8].as_ssz_bytes());
}
/// Transparent struct with an extra skipped field; only `inner` participates in SSZ.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(struct_behaviour = "transparent")]
struct TransparentStructSkippedField {
    inner: Vec<u8>,
    #[ssz(skip_serializing, skip_deserializing)]
    skipped: PhantomData<u64>,
}

impl TransparentStructSkippedField {
    fn new(inner: u8) -> Self {
        Self {
            inner: vec![inner],
            skipped: PhantomData,
        }
    }
}

#[test]
fn transparent_struct_skipped_field() {
    assert_encode_decode(
        &TransparentStructSkippedField::new(42),
        &vec![42_u8].as_ssz_bytes(),
    );
}
/// Transparent newtype (tuple) struct: encodes exactly like its single `Vec<u8>` field.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(struct_behaviour = "transparent")]
struct TransparentStructNewType(Vec<u8>);

#[test]
fn transparent_struct_newtype() {
    assert_encode_decode(
        &TransparentStructNewType(vec![42_u8]),
        &vec![42_u8].as_ssz_bytes(),
    );
}
/// Transparent newtype with a trailing skipped field; only field `0` participates in SSZ.
#[derive(PartialEq, Debug, Encode, Decode)]
#[ssz(struct_behaviour = "transparent")]
struct TransparentStructNewTypeSkippedField(
    Vec<u8>,
    #[ssz(skip_serializing, skip_deserializing)] PhantomData<u64>,
);

impl TransparentStructNewTypeSkippedField {
    fn new(inner: Vec<u8>) -> Self {
        Self(inner, PhantomData)
    }
}

#[test]
fn transparent_struct_newtype_skipped_field() {
    assert_encode_decode(
        &TransparentStructNewTypeSkippedField::new(vec![42_u8]),
        &vec![42_u8].as_ssz_bytes(),
    );
}

View File

@ -1,25 +0,0 @@
[package]
name = "eth2_ssz_types"
version = "0.2.2"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2021"
description = "Provides types with unique properties required for SSZ serialization and Merklization."
license = "Apache-2.0"

[lib]
# Published as `eth2_ssz_types` but imported as `ssz_types` in Rust code.
name = "ssz_types"

[dependencies]
tree_hash = "0.4.1"
serde = "1.0.116"
serde_derive = "1.0.116"
eth2_serde_utils = "0.1.1"
eth2_ssz = "0.4.1"
typenum = "1.12.0"
# `arbitrary` support is optional; enabled via this crate's `arbitrary` feature.
arbitrary = { version = "1.0", features = ["derive"], optional = true }
derivative = "2.1.1"
smallvec = "1.8.0"

[dev-dependencies]
serde_json = "1.0.58"
tree_hash_derive = "0.4.0"

File diff suppressed because it is too large Load Diff

View File

@ -1,446 +0,0 @@
use crate::tree_hash::vec_tree_hash_root;
use crate::Error;
use derivative::Derivative;
use serde_derive::{Deserialize, Serialize};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut, Index, IndexMut};
use std::slice::SliceIndex;
use tree_hash::Hash256;
use typenum::Unsigned;
pub use typenum;
/// Emulates a SSZ `Vector` (distinct from a Rust `Vec`).
///
/// An ordered, heap-allocated, fixed-length, homogeneous collection of `T`, with `N` values.
///
/// This struct is backed by a Rust `Vec` but constrained such that it must be instantiated with a
/// fixed number of elements and you may not add or remove elements, only modify.
///
/// The length of this struct is fixed at the type-level using
/// [typenum](https://crates.io/crates/typenum).
///
/// ## Note
///
/// Whilst it is possible with this library, SSZ declares that a `FixedVector` with a length of `0`
/// is illegal.
///
/// ## Example
///
/// ```
/// use ssz_types::{FixedVector, typenum};
///
/// let base: Vec<u64> = vec![1, 2, 3, 4];
///
/// // Create a `FixedVector` from a `Vec` that has the expected length.
/// let exact: FixedVector<_, typenum::U4> = FixedVector::from(base.clone());
/// assert_eq!(&exact[..], &[1, 2, 3, 4]);
///
/// // Create a `FixedVector` from a `Vec` that is too long and the `Vec` is truncated.
/// let short: FixedVector<_, typenum::U3> = FixedVector::from(base.clone());
/// assert_eq!(&short[..], &[1, 2, 3]);
///
/// // Create a `FixedVector` from a `Vec` that is too short and the missing values are created
/// // using `std::default::Default`.
/// let long: FixedVector<_, typenum::U5> = FixedVector::from(base);
/// assert_eq!(&long[..], &[1, 2, 3, 4, 0]);
/// ```
#[derive(Debug, Clone, Serialize, Deserialize, Derivative)]
#[derivative(PartialEq, Hash(bound = "T: std::hash::Hash"))]
#[serde(transparent)]
pub struct FixedVector<T, N> {
    // Backing storage. Invariant: `vec.len() == N::to_usize()` for every constructed value.
    vec: Vec<T>,
    // Zero-sized marker binding the type-level length `N`.
    _phantom: PhantomData<N>,
}
impl<T, N: Unsigned> FixedVector<T, N> {
/// Returns `Ok` if the given `vec` equals the fixed length of `Self`. Otherwise returns
/// `Err`.
pub fn new(vec: Vec<T>) -> Result<Self, Error> {
if vec.len() == Self::capacity() {
Ok(Self {
vec,
_phantom: PhantomData,
})
} else {
Err(Error::OutOfBounds {
i: vec.len(),
len: Self::capacity(),
})
}
}
/// Create a new vector filled with clones of `elem`.
pub fn from_elem(elem: T) -> Self
where
T: Clone,
{
Self {
vec: vec![elem; N::to_usize()],
_phantom: PhantomData,
}
}
/// Identical to `self.capacity`, returns the type-level constant length.
///
/// Exists for compatibility with `Vec`.
pub fn len(&self) -> usize {
self.vec.len()
}
/// True if the type-level constant length of `self` is zero.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the type-level constant length.
pub fn capacity() -> usize {
N::to_usize()
}
}
impl<T: Default, N: Unsigned> From<Vec<T>> for FixedVector<T, N> {
    /// Convert by truncating a too-long `vec` or right-padding a too-short one with
    /// `T::default()` until it has exactly `N` elements.
    fn from(mut vec: Vec<T>) -> Self {
        let target_len = Self::capacity();
        vec.resize_with(target_len, Default::default);

        Self {
            vec,
            _phantom: PhantomData,
        }
    }
}
impl<T, N: Unsigned> From<FixedVector<T, N>> for Vec<T> {
    /// Unwrap the backing `Vec`, discarding the type-level length.
    fn from(fixed: FixedVector<T, N>) -> Vec<T> {
        fixed.vec
    }
}
impl<T: Default, N: Unsigned> Default for FixedVector<T, N> {
    /// Build a vector of `N` default-constructed elements.
    fn default() -> Self {
        let vec = std::iter::repeat_with(T::default)
            .take(N::to_usize())
            .collect();
        Self {
            vec,
            _phantom: PhantomData,
        }
    }
}
impl<T, N: Unsigned, I: SliceIndex<[T]>> Index<I> for FixedVector<T, N> {
    type Output = I::Output;

    /// Delegate indexing straight to the backing `Vec`.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        &self.vec[index]
    }
}
impl<T, N: Unsigned, I: SliceIndex<[T]>> IndexMut<I> for FixedVector<T, N> {
    /// Delegate mutable indexing straight to the backing `Vec`.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.vec[index]
    }
}
impl<T, N: Unsigned> Deref for FixedVector<T, N> {
    type Target = [T];

    /// Expose the contents as an immutable slice.
    fn deref(&self) -> &[T] {
        self.vec.as_slice()
    }
}
// This implementation is required to use `get_mut` to access elements.
//
// It's safe because none of the methods on mutable slices allow changing the length
// of the backing vec.
impl<T, N: Unsigned> DerefMut for FixedVector<T, N> {
    /// Expose the contents as a mutable slice (length cannot change through a slice).
    fn deref_mut(&mut self) -> &mut [T] {
        self.vec.as_mut_slice()
    }
}
impl<T, N: Unsigned> tree_hash::TreeHash for FixedVector<T, N>
where
    T: tree_hash::TreeHash,
{
    fn tree_hash_type() -> tree_hash::TreeHashType {
        tree_hash::TreeHashType::Vector
    }

    // Vectors are composite types: they are merkleized, never packed into a single leaf.
    fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding {
        unreachable!("Vector should never be packed.")
    }

    fn tree_hash_packing_factor() -> usize {
        unreachable!("Vector should never be packed.")
    }

    fn tree_hash_root(&self) -> Hash256 {
        // Delegates merkleization to the shared helper, padded/limited by `N`.
        vec_tree_hash_root::<T, N>(&self.vec)
    }
}
impl<T, N: Unsigned> ssz::Encode for FixedVector<T, N>
where
    T: ssz::Encode,
{
    // A fixed vector is SSZ fixed-length iff its element type is.
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        if <Self as ssz::Encode>::is_ssz_fixed_len() {
            T::ssz_fixed_len() * N::to_usize()
        } else {
            ssz::BYTES_PER_LENGTH_OFFSET
        }
    }

    fn ssz_bytes_len(&self) -> usize {
        self.vec.ssz_bytes_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        if T::is_ssz_fixed_len() {
            // Fixed-size elements are written back-to-back with no offsets.
            buf.reserve(T::ssz_fixed_len() * self.len());

            for item in &self.vec {
                item.ssz_append(buf);
            }
        } else {
            // Variable-size elements need one 4-byte offset per element before the bodies.
            let mut encoder =
                ssz::SszEncoder::container(buf, self.len() * ssz::BYTES_PER_LENGTH_OFFSET);

            for item in &self.vec {
                encoder.append(item);
            }

            encoder.finalize();
        }
    }
}
impl<T, N: Unsigned> ssz::Decode for FixedVector<T, N>
where
    T: ssz::Decode,
{
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        if <Self as ssz::Decode>::is_ssz_fixed_len() {
            T::ssz_fixed_len() * N::to_usize()
        } else {
            ssz::BYTES_PER_LENGTH_OFFSET
        }
    }

    /// Decode a `FixedVector` from SSZ bytes.
    ///
    /// Errors if `bytes` is empty (a zero-length `FixedVector` is illegal in SSZ), if the
    /// number of encoded items differs from `N`, or if any element fails to decode.
    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> {
        let fixed_len = N::to_usize();

        if bytes.is_empty() {
            Err(ssz::DecodeError::InvalidByteLength {
                len: 0,
                expected: 1,
            })
        } else if T::is_ssz_fixed_len() {
            // `checked_div` maps a zero-sized element type to `ZeroLengthItem` rather than
            // panicking on division by zero.
            let num_items = bytes
                .len()
                .checked_div(T::ssz_fixed_len())
                .ok_or(ssz::DecodeError::ZeroLengthItem)?;

            if num_items != fixed_len {
                // Bug fix: the format arguments were previously swapped, so a 4-item vector
                // given 5 items reported "FixedVector of 5 items has 4 items".
                return Err(ssz::DecodeError::BytesInvalid(format!(
                    "FixedVector of {} items has {} items",
                    fixed_len, num_items
                )));
            }

            bytes
                .chunks(T::ssz_fixed_len())
                .map(|chunk| T::from_ssz_bytes(chunk))
                .collect::<Result<Vec<T>, _>>()
                .and_then(|vec| {
                    Self::new(vec).map_err(|e| {
                        ssz::DecodeError::BytesInvalid(format!(
                            "Wrong number of FixedVector elements: {:?}",
                            e
                        ))
                    })
                })
        } else {
            // Variable-size elements: delegate offset parsing, requiring exactly `N` items.
            let vec = ssz::decode_list_of_variable_length_items(bytes, Some(fixed_len))?;
            Self::new(vec).map_err(|e| {
                ssz::DecodeError::BytesInvalid(format!(
                    "Wrong number of FixedVector elements: {:?}",
                    e
                ))
            })
        }
    }
}
#[cfg(feature = "arbitrary")]
impl<'a, T: arbitrary::Arbitrary<'a>, N: 'static + Unsigned> arbitrary::Arbitrary<'a>
    for FixedVector<T, N>
{
    /// Generate exactly `N` arbitrary elements, failing if the unstructured input runs out.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let len = N::to_usize();
        let elements = (0..len)
            .map(|_| <T>::arbitrary(u))
            .collect::<arbitrary::Result<Vec<T>>>()?;
        Self::new(elements).map_err(|_| arbitrary::Error::IncorrectFormat)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use ssz::*;
    use tree_hash::{merkle_root, TreeHash};
    use tree_hash_derive::TreeHash;
    use typenum::*;

    // `new` must reject vecs whose length differs from the type-level length.
    #[test]
    fn new() {
        let vec = vec![42; 5];
        let fixed: Result<FixedVector<u64, U4>, _> = FixedVector::new(vec);
        assert!(fixed.is_err());

        let vec = vec![42; 3];
        let fixed: Result<FixedVector<u64, U4>, _> = FixedVector::new(vec);
        assert!(fixed.is_err());

        let vec = vec![42; 4];
        let fixed: Result<FixedVector<u64, U4>, _> = FixedVector::new(vec);
        assert!(fixed.is_ok());
    }

    // `From<Vec<_>>` pads with defaults, so indexing covers the full fixed length.
    #[test]
    fn indexing() {
        let vec = vec![1, 2];

        let mut fixed: FixedVector<u64, U8192> = vec.clone().into();

        assert_eq!(fixed[0], 1);
        assert_eq!(&fixed[0..1], &vec[0..1]);
        assert_eq!((fixed[..]).len(), 8192);

        fixed[1] = 3;
        assert_eq!(fixed[1], 3);
    }

    // `From<Vec<_>>` truncates long vecs and zero-pads short ones.
    #[test]
    fn length() {
        let vec = vec![42; 5];
        let fixed: FixedVector<u64, U4> = FixedVector::from(vec.clone());
        assert_eq!(&fixed[..], &vec[0..4]);

        let vec = vec![42; 3];
        let fixed: FixedVector<u64, U4> = FixedVector::from(vec.clone());
        assert_eq!(&fixed[0..3], &vec[..]);
        assert_eq!(&fixed[..], &vec![42, 42, 42, 0][..]);

        let vec = vec![];
        let fixed: FixedVector<u64, U4> = FixedVector::from(vec);
        assert_eq!(&fixed[..], &vec![0, 0, 0, 0][..]);
    }

    // Slice methods are available through `Deref`.
    #[test]
    fn deref() {
        let vec = vec![0, 2, 4, 6];
        let fixed: FixedVector<u64, U4> = FixedVector::from(vec);

        assert_eq!(fixed.first(), Some(&0));
        assert_eq!(fixed.get(3), Some(&6));
        assert_eq!(fixed.get(4), None);
    }

    #[test]
    fn ssz_encode() {
        let vec: FixedVector<u16, U2> = vec![0; 2].into();
        assert_eq!(vec.as_ssz_bytes(), vec![0, 0, 0, 0]);
        assert_eq!(<FixedVector<u16, U2> as Encode>::ssz_fixed_len(), 4);
    }

    // Round-trip helper: encode, check the reported length, decode back to equality.
    fn ssz_round_trip<T: Encode + Decode + std::fmt::Debug + PartialEq>(item: T) {
        let encoded = &item.as_ssz_bytes();
        assert_eq!(item.ssz_bytes_len(), encoded.len());
        assert_eq!(T::from_ssz_bytes(encoded), Ok(item));
    }

    #[test]
    fn ssz_round_trip_u16_len_8() {
        ssz_round_trip::<FixedVector<u16, U8>>(vec![42; 8].into());
        ssz_round_trip::<FixedVector<u16, U8>>(vec![0; 8].into());
    }

    // Basic (packed) element types: roots match `merkle_root` over the raw bytes.
    #[test]
    fn tree_hash_u8() {
        let fixed: FixedVector<u8, U0> = FixedVector::from(vec![]);
        assert_eq!(fixed.tree_hash_root(), merkle_root(&[0; 8], 0));

        let fixed: FixedVector<u8, U1> = FixedVector::from(vec![0; 1]);
        assert_eq!(fixed.tree_hash_root(), merkle_root(&[0; 8], 0));

        let fixed: FixedVector<u8, U8> = FixedVector::from(vec![0; 8]);
        assert_eq!(fixed.tree_hash_root(), merkle_root(&[0; 8], 0));

        let fixed: FixedVector<u8, U16> = FixedVector::from(vec![42; 16]);
        assert_eq!(fixed.tree_hash_root(), merkle_root(&[42; 16], 0));

        let source: Vec<u8> = (0..16).collect();
        let fixed: FixedVector<u8, U16> = FixedVector::from(source.clone());
        assert_eq!(fixed.tree_hash_root(), merkle_root(&source, 0));
    }

    // Minimal composite element type for the composite merkleization test below.
    #[derive(Clone, Copy, TreeHash, Default)]
    struct A {
        a: u32,
        b: u32,
    }

    // Concatenate `n` copies of `input` into one byte vector.
    fn repeat(input: &[u8], n: usize) -> Vec<u8> {
        let mut output = vec![];

        for _ in 0..n {
            output.append(&mut input.to_vec());
        }

        output
    }

    // Composite element types: roots match `merkle_root` over the concatenated element roots.
    #[test]
    fn tree_hash_composite() {
        let a = A { a: 0, b: 1 };

        let fixed: FixedVector<A, U0> = FixedVector::from(vec![]);
        assert_eq!(fixed.tree_hash_root(), merkle_root(&[0; 32], 0));

        let fixed: FixedVector<A, U1> = FixedVector::from(vec![a]);
        assert_eq!(
            fixed.tree_hash_root(),
            merkle_root(a.tree_hash_root().as_bytes(), 0)
        );

        let fixed: FixedVector<A, U8> = FixedVector::from(vec![a; 8]);
        assert_eq!(
            fixed.tree_hash_root(),
            merkle_root(&repeat(a.tree_hash_root().as_bytes(), 8), 0)
        );

        let fixed: FixedVector<A, U13> = FixedVector::from(vec![a; 13]);
        assert_eq!(
            fixed.tree_hash_root(),
            merkle_root(&repeat(a.tree_hash_root().as_bytes(), 13), 0)
        );

        let fixed: FixedVector<A, U16> = FixedVector::from(vec![a; 16]);
        assert_eq!(
            fixed.tree_hash_root(),
            merkle_root(&repeat(a.tree_hash_root().as_bytes(), 16), 0)
        );
    }
}

View File

@ -1,72 +0,0 @@
//! Provides types with unique properties required for SSZ serialization and Merklization:
//!
//! - `FixedVector`: A heap-allocated list with a size that is fixed at compile time.
//! - `VariableList`: A heap-allocated list that cannot grow past a type-level maximum length.
//! - `BitList`: A heap-allocated bitfield with a type-level _maximum_ length.
//! - `BitVector`: A heap-allocated bitfield with a type-level _fixed_ length.
//!
//! These structs are required as SSZ serialization and Merklization rely upon type-level lengths
//! for padding and verification.
//!
//! Adheres to the Ethereum 2.0 [SSZ
//! specification](https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/ssz/simple-serialize.md)
//! at v0.12.1.
//!
//! ## Example
//! ```
//! use ssz_types::*;
//!
//! pub struct Example {
//! bit_vector: BitVector<typenum::U8>,
//! bit_list: BitList<typenum::U8>,
//! variable_list: VariableList<u64, typenum::U8>,
//! fixed_vector: FixedVector<u64, typenum::U8>,
//! }
//!
//! let mut example = Example {
//! bit_vector: Bitfield::new(),
//! bit_list: Bitfield::with_capacity(4).unwrap(),
//! variable_list: <_>::from(vec![0, 1]),
//! fixed_vector: <_>::from(vec![2, 3]),
//! };
//!
//! assert_eq!(example.bit_vector.len(), 8);
//! assert_eq!(example.bit_list.len(), 4);
//! assert_eq!(&example.variable_list[..], &[0, 1]);
//! assert_eq!(&example.fixed_vector[..], &[2, 3, 0, 0, 0, 0, 0, 0]);
//!
//! ```
#[macro_use]
mod bitfield;
mod fixed_vector;
pub mod serde_utils;
mod tree_hash;
mod variable_list;
pub use bitfield::{BitList, BitVector, Bitfield};
pub use fixed_vector::FixedVector;
pub use typenum;
pub use variable_list::VariableList;
pub mod length {
pub use crate::bitfield::{Fixed, Variable};
}
/// Returned when an item encounters an error.
#[derive(PartialEq, Debug, Clone)]
pub enum Error {
    /// A length `i` was out of bounds for a collection whose fixed/maximum length is `len`.
    OutOfBounds {
        i: usize,
        len: usize,
    },
    /// A `BitList` does not have a set bit, therefore its length is unknowable.
    MissingLengthInformation,
    /// A `BitList` has excess bits set to true.
    ExcessBits,
    /// A `BitList` has an invalid number of bytes for a given bit length.
    InvalidByteCount {
        given: usize,
        expected: usize,
    },
}

View File

@ -1,22 +0,0 @@
use crate::FixedVector;
use eth2_serde_utils::hex::{self, PrefixedHexVisitor};
use serde::{Deserializer, Serializer};
use typenum::Unsigned;
/// Serialize a fixed-length byte vector as a 0x-prefixed hex string.
pub fn serialize<S, U>(bytes: &FixedVector<u8, U>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
    U: Unsigned,
{
    let hex_string = hex::encode(&bytes[..]);
    serializer.serialize_str(&hex_string)
}
/// Deserialize a 0x-prefixed hex string into a fixed-length byte vector, erroring if the
/// decoded length does not match `U`.
pub fn deserialize<'de, D, U>(deserializer: D) -> Result<FixedVector<u8, U>, D::Error>
where
    D: Deserializer<'de>,
    U: Unsigned,
{
    let raw_bytes = deserializer.deserialize_string(PrefixedHexVisitor)?;
    FixedVector::new(raw_bytes)
        .map_err(|e| serde::de::Error::custom(format!("invalid fixed vector: {:?}", e)))
}

View File

@ -1,23 +0,0 @@
//! Serialize `VariableList<u8, N>` as 0x-prefixed hex string.
use crate::VariableList;
use eth2_serde_utils::hex::{self, PrefixedHexVisitor};
use serde::{Deserializer, Serializer};
use typenum::Unsigned;
/// Serialize the bytes of a `VariableList<u8, N>` as a single hex string
/// (encoded via `eth2_serde_utils::hex::encode`).
pub fn serialize<S, N>(bytes: &VariableList<u8, N>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
    N: Unsigned,
{
    let encoded = hex::encode(&**bytes);
    serializer.serialize_str(&encoded)
}
pub fn deserialize<'de, D, N>(deserializer: D) -> Result<VariableList<u8, N>, D::Error>
where
D: Deserializer<'de>,
N: Unsigned,
{
let bytes = deserializer.deserialize_str(PrefixedHexVisitor)?;
VariableList::new(bytes)
.map_err(|e| serde::de::Error::custom(format!("invalid variable list: {:?}", e)))
}

View File

@ -1,77 +0,0 @@
//! Serialize `VariableList<VariableList<u8, M>, N>` as a list of 0x-prefixed hex strings.
use crate::VariableList;
use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer};
use std::marker::PhantomData;
use typenum::Unsigned;
/// Owned wrapper so serde deserializes each inner byte list via the
/// `hex_var_list` helpers (i.e. from a 0x-prefixed hex string).
#[derive(Deserialize)]
#[serde(transparent)]
pub struct WrappedListOwned<N: Unsigned>(
    #[serde(with = "crate::serde_utils::hex_var_list")] VariableList<u8, N>,
);

/// Borrowed wrapper so serde serializes each inner byte list via the
/// `hex_var_list` helpers (i.e. to a 0x-prefixed hex string).
#[derive(Serialize)]
#[serde(transparent)]
pub struct WrappedListRef<'a, N: Unsigned>(
    #[serde(with = "crate::serde_utils::hex_var_list")] &'a VariableList<u8, N>,
);
/// Serialize `list` as a sequence in which every inner `VariableList<u8, M>` is
/// wrapped in `WrappedListRef` and thus encoded as a hex string.
pub fn serialize<S, M, N>(
    list: &VariableList<VariableList<u8, M>, N>,
    serializer: S,
) -> Result<S::Ok, S::Error>
where
    S: Serializer,
    M: Unsigned,
    N: Unsigned,
{
    let mut seq = serializer.serialize_seq(Some(list.len()))?;
    list.iter()
        .try_for_each(|bytes| seq.serialize_element(&WrappedListRef(bytes)))?;
    seq.end()
}
/// Serde visitor that accumulates hex-encoded byte lists into a
/// `VariableList<VariableList<u8, M>, N>`.
#[derive(Default)]
pub struct Visitor<M, N> {
    _phantom_m: PhantomData<M>,
    _phantom_n: PhantomData<N>,
}

impl<'a, M, N> serde::de::Visitor<'a> for Visitor<M, N>
where
    M: Unsigned,
    N: Unsigned,
{
    type Value = VariableList<VariableList<u8, M>, N>;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a list of 0x-prefixed hex bytes")
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'a>,
    {
        let mut list: VariableList<VariableList<u8, M>, N> = <_>::default();

        while let Some(val) = seq.next_element::<WrappedListOwned<M>>()? {
            // `push` errors once the outer list reaches its type-level maximum `N`.
            list.push(val.0).map_err(|e| {
                serde::de::Error::custom(format!("failed to push value to list: {:?}.", e))
            })?;
        }

        Ok(list)
    }
}
/// Deserialize a sequence of 0x-prefixed hex strings into a
/// `VariableList<VariableList<u8, M>, N>`.
pub fn deserialize<'de, D, M, N>(
    deserializer: D,
) -> Result<VariableList<VariableList<u8, M>, N>, D::Error>
where
    D: Deserializer<'de>,
    M: Unsigned,
    N: Unsigned,
{
    let visitor = Visitor::default();
    deserializer.deserialize_seq(visitor)
}

View File

@ -1,5 +0,0 @@
pub mod hex_fixed_vec;
pub mod hex_var_list;
pub mod list_of_hex_var_list;
pub mod quoted_u64_fixed_vec;
pub mod quoted_u64_var_list;

View File

@ -1,113 +0,0 @@
//! Formats `FixedVector<u64,N>` using quotes.
//!
//! E.g., `FixedVector::from(vec![0, 1, 2])` serializes as `["0", "1", "2"]`.
//!
//! Quotes can be optional during decoding. If `N` does not equal the length deserialization will fail.
use crate::serde_utils::quoted_u64_var_list::deserialize_max;
use crate::FixedVector;
use eth2_serde_utils::quoted_u64_vec::QuotedIntWrapper;
use serde::ser::SerializeSeq;
use serde::{Deserializer, Serializer};
use std::marker::PhantomData;
use typenum::Unsigned;
/// Serde visitor decoding a sequence of quoted or unquoted integers into a
/// `FixedVector<u64, N>`.
pub struct QuotedIntFixedVecVisitor<N> {
    _phantom: PhantomData<N>,
}

impl<'a, N> serde::de::Visitor<'a> for QuotedIntFixedVecVisitor<N>
where
    N: Unsigned,
{
    type Value = FixedVector<u64, N>;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a list of quoted or unquoted integers")
    }

    fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'a>,
    {
        // `deserialize_max` rejects sequences longer than `N`; per the module docs,
        // `FixedVector::new` then rejects sequences whose length does not equal `N`.
        let vec = deserialize_max(seq, N::to_usize())?;
        let fix: FixedVector<u64, N> = FixedVector::new(vec)
            .map_err(|e| serde::de::Error::custom(format!("FixedVector: {:?}", e)))?;
        Ok(fix)
    }
}
/// Serialize a slice of `u64` as a sequence in which each integer is wrapped in
/// `QuotedIntWrapper` (per the module docs, serialized as a quoted decimal string).
pub fn serialize<S>(value: &[u64], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut seq = serializer.serialize_seq(Some(value.len()))?;
    for int in value.iter().copied() {
        seq.serialize_element(&QuotedIntWrapper { int })?;
    }
    seq.end()
}
/// Deserialize a `FixedVector<u64, N>` from a sequence of quoted or unquoted integers.
pub fn deserialize<'de, D, N>(deserializer: D) -> Result<FixedVector<u64, N>, D::Error>
where
    D: Deserializer<'de>,
    N: Unsigned,
{
    let visitor = QuotedIntFixedVecVisitor {
        _phantom: PhantomData,
    };
    deserializer.deserialize_any(visitor)
}
#[cfg(test)]
mod test {
    use super::*;
    use serde_derive::{Deserialize, Serialize};
    use typenum::U4;

    // Test struct exercising the `quoted_u64_fixed_vec` helpers via `#[serde(with)]`.
    #[derive(Debug, Serialize, Deserialize)]
    struct Obj {
        #[serde(with = "crate::serde_utils::quoted_u64_fixed_vec")]
        values: FixedVector<u64, U4>,
    }

    #[test]
    fn quoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", "2", "3", "4"] }"#).unwrap();
        let expected: FixedVector<u64, U4> = FixedVector::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    #[test]
    fn unquoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [1, 2, 3, 4] }"#).unwrap();
        let expected: FixedVector<u64, U4> = FixedVector::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    // Quoted and unquoted integers may be freely mixed within one list.
    #[test]
    fn mixed_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", 2, "3", "4"] }"#).unwrap();
        let expected: FixedVector<u64, U4> = FixedVector::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    // A fixed vector must contain exactly `N` elements: fewer or more must fail.
    #[test]
    fn empty_list_err() {
        serde_json::from_str::<Obj>(r#"{ "values": [] }"#).unwrap_err();
    }

    #[test]
    fn short_list_err() {
        serde_json::from_str::<Obj>(r#"{ "values": [1, 2] }"#).unwrap_err();
    }

    #[test]
    fn long_list_err() {
        serde_json::from_str::<Obj>(r#"{ "values": [1, 2, 3, 4, 5] }"#).unwrap_err();
    }

    // The value must be a JSON array, not the whole array quoted as one string.
    #[test]
    fn whole_list_quoted_err() {
        serde_json::from_str::<Obj>(r#"{ "values": "[1, 2, 3, 4]" }"#).unwrap_err();
    }
}

View File

@ -1,139 +0,0 @@
//! Formats `VariableList<u64,N>` using quotes.
//!
//! E.g., `VariableList::from(vec![0, 1, 2])` serializes as `["0", "1", "2"]`.
//!
//! Quotes can be optional during decoding. If the length of the `Vec` is greater than `N`, deserialization fails.
use crate::VariableList;
use eth2_serde_utils::quoted_u64_vec::QuotedIntWrapper;
use serde::ser::SerializeSeq;
use serde::{Deserializer, Serializer};
use std::marker::PhantomData;
use typenum::Unsigned;
/// Serde visitor decoding a sequence of quoted or unquoted integers into a
/// `VariableList<u64, N>`.
pub struct QuotedIntVarListVisitor<N> {
    _phantom: PhantomData<N>,
}

impl<'a, N> serde::de::Visitor<'a> for QuotedIntVarListVisitor<N>
where
    N: Unsigned,
{
    type Value = VariableList<u64, N>;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a list of quoted or unquoted integers")
    }

    fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'a>,
    {
        // `deserialize_max` already enforces the `N` upper bound, so the
        // `VariableList::new` error path should be unreachable in practice.
        let vec = deserialize_max(seq, N::to_usize())?;
        let list: VariableList<u64, N> = VariableList::new(vec)
            .map_err(|e| serde::de::Error::custom(format!("VariableList: {:?}", e)))?;
        Ok(list)
    }
}
/// Serialize a slice of `u64` as a sequence of `QuotedIntWrapper` elements
/// (per the module docs, each integer is serialized as a quoted decimal string).
pub fn serialize<S>(value: &[u64], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let mut sequence = serializer.serialize_seq(Some(value.len()))?;
    for int in value.iter().copied() {
        sequence.serialize_element(&QuotedIntWrapper { int })?;
    }
    sequence.end()
}
/// Deserialize a `VariableList<u64, N>` from a sequence of quoted or unquoted integers.
pub fn deserialize<'de, D, N>(deserializer: D) -> Result<VariableList<u64, N>, D::Error>
where
    D: Deserializer<'de>,
    N: Unsigned,
{
    let visitor = QuotedIntVarListVisitor {
        _phantom: PhantomData,
    };
    deserializer.deserialize_any(visitor)
}
/// Returns a `Vec` of no more than `max_items` length.
pub(crate) fn deserialize_max<'a, A>(mut seq: A, max_items: usize) -> Result<Vec<u64>, A::Error>
where
A: serde::de::SeqAccess<'a>,
{
let mut vec = vec![];
let mut counter = 0;
while let Some(val) = seq.next_element()? {
let val: QuotedIntWrapper = val;
counter += 1;
if counter > max_items {
return Err(serde::de::Error::custom(format!(
"Deserialization failed. Length cannot be greater than {}.",
max_items
)));
}
vec.push(val.int);
}
Ok(vec)
}
#[cfg(test)]
mod test {
    use super::*;
    use serde_derive::{Deserialize, Serialize};
    use typenum::U4;

    // Test struct exercising the `quoted_u64_var_list` helpers via `#[serde(with)]`.
    #[derive(Debug, Serialize, Deserialize)]
    struct Obj {
        #[serde(with = "crate::serde_utils::quoted_u64_var_list")]
        values: VariableList<u64, U4>,
    }

    #[test]
    fn quoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", "2", "3", "4"] }"#).unwrap();
        let expected: VariableList<u64, U4> = VariableList::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    #[test]
    fn unquoted_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [1, 2, 3, 4] }"#).unwrap();
        let expected: VariableList<u64, U4> = VariableList::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    // Quoted and unquoted integers may be freely mixed within one list.
    #[test]
    fn mixed_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": ["1", 2, "3", "4"] }"#).unwrap();
        let expected: VariableList<u64, U4> = VariableList::from(vec![1, 2, 3, 4]);
        assert_eq!(obj.values, expected);
    }

    // Unlike a fixed vector, a variable list accepts fewer than `N` elements.
    #[test]
    fn empty_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [] }"#).unwrap();
        assert!(obj.values.is_empty());
    }

    #[test]
    fn short_list_success() {
        let obj: Obj = serde_json::from_str(r#"{ "values": [1, 2] }"#).unwrap();
        let expected: VariableList<u64, U4> = VariableList::from(vec![1, 2]);
        assert_eq!(obj.values, expected);
    }

    // More than `N` elements must fail.
    #[test]
    fn long_list_err() {
        serde_json::from_str::<Obj>(r#"{ "values": [1, 2, 3, 4, 5] }"#).unwrap_err();
    }

    // The value must be a JSON array, not the whole array quoted as one string.
    #[test]
    fn whole_list_quoted_err() {
        serde_json::from_str::<Obj>(r#"{ "values": "[1, 2, 3, 4]" }"#).unwrap_err();
    }
}

View File

@ -1,58 +0,0 @@
use tree_hash::{Hash256, MerkleHasher, TreeHash, TreeHashType, BYTES_PER_CHUNK};
use typenum::Unsigned;
/// A helper function providing common functionality between the `TreeHash` implementations for
/// `FixedVector` and `VariableList`.
///
/// Note: the tree is always sized by the type-level maximum `N`, not by `vec.len()`,
/// so shorter vectors are implicitly zero-padded.
pub fn vec_tree_hash_root<T, N>(vec: &[T]) -> Hash256
where
    T: TreeHash,
    N: Unsigned,
{
    match T::tree_hash_type() {
        TreeHashType::Basic => {
            // Basic types are packed several-to-a-chunk: allocate
            // ceil(N / packing_factor) leaves.
            let mut hasher = MerkleHasher::with_leaves(
                (N::to_usize() + T::tree_hash_packing_factor() - 1) / T::tree_hash_packing_factor(),
            );

            for item in vec {
                hasher
                    .write(&item.tree_hash_packed_encoding())
                    .expect("ssz_types variable vec should not contain more elements than max");
            }

            hasher
                .finish()
                .expect("ssz_types variable vec should not have a remaining buffer")
        }
        TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
            // Composite types each occupy exactly one leaf: their own tree hash root.
            let mut hasher = MerkleHasher::with_leaves(N::to_usize());

            for item in vec {
                hasher
                    .write(item.tree_hash_root().as_bytes())
                    .expect("ssz_types vec should not contain more elements than max");
            }

            hasher
                .finish()
                .expect("ssz_types vec should not have a remaining buffer")
        }
    }
}
/// Compute the Merkle root of the packed byte representation of a bitfield whose
/// type-level bit length is `N`.
pub fn bitfield_bytes_tree_hash_root<N: Unsigned>(bytes: &[u8]) -> Hash256 {
    // ceil(N / 8): the number of bytes required to hold `N` bits.
    let max_byte_len = (N::to_usize() + 7) / 8;
    // ceil(bytes / chunk): the number of 32-byte leaves those bytes occupy.
    let max_leaf_count = (max_byte_len + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;

    let mut hasher = MerkleHasher::with_leaves(max_leaf_count);

    hasher
        .write(bytes)
        .expect("bitfield should not exceed tree hash leaf limit");

    hasher
        .finish()
        .expect("bitfield tree hash buffer should not exceed leaf limit")
}

View File

@ -1,477 +0,0 @@
use crate::tree_hash::vec_tree_hash_root;
use crate::Error;
use derivative::Derivative;
use serde_derive::{Deserialize, Serialize};
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut, Index, IndexMut};
use std::slice::SliceIndex;
use tree_hash::Hash256;
use typenum::Unsigned;
pub use typenum;
/// Emulates a SSZ `List`.
///
/// An ordered, heap-allocated, variable-length, homogeneous collection of `T`, with no more than
/// `N` values.
///
/// This struct is backed by a Rust `Vec` constrained such that it never holds more than `N`
/// elements; items may be appended with `push` until that maximum is reached.
///
/// The maximum length is fixed at the type-level using
/// [typenum](https://crates.io/crates/typenum).
///
/// ## Example
///
/// ```
/// use ssz_types::{VariableList, typenum};
///
/// let base: Vec<u64> = vec![1, 2, 3, 4];
///
/// // Create a `VariableList` from a `Vec` that has the expected length.
/// let exact: VariableList<_, typenum::U4> = VariableList::from(base.clone());
/// assert_eq!(&exact[..], &[1, 2, 3, 4]);
///
/// // Create a `VariableList` from a `Vec` that is too long and the `Vec` is truncated.
/// let short: VariableList<_, typenum::U3> = VariableList::from(base.clone());
/// assert_eq!(&short[..], &[1, 2, 3]);
///
/// // Create a `VariableList` from a `Vec` that is shorter than the maximum.
/// let mut long: VariableList<_, typenum::U5> = VariableList::from(base);
/// assert_eq!(&long[..], &[1, 2, 3, 4]);
///
/// // Push a value if it does not exceed the maximum.
/// long.push(5).unwrap();
/// assert_eq!(&long[..], &[1, 2, 3, 4, 5]);
///
/// // Push a value if it _does_ exceed the maximum.
/// assert!(long.push(6).is_err());
/// ```
#[derive(Debug, Clone, Serialize, Deserialize, Derivative)]
#[derivative(PartialEq, Eq, Hash(bound = "T: std::hash::Hash"))]
#[serde(transparent)]
pub struct VariableList<T, N> {
    /// Backing storage; its length is always `<= N::to_usize()`.
    vec: Vec<T>,
    /// Zero-sized marker binding the type-level maximum length `N`.
    _phantom: PhantomData<N>,
}
impl<T, N: Unsigned> VariableList<T, N> {
    /// Returns `Ok` if the given `vec` contains no more than `N` elements. Otherwise returns
    /// `Err(Error::OutOfBounds)`.
    pub fn new(vec: Vec<T>) -> Result<Self, Error> {
        if vec.len() <= N::to_usize() {
            Ok(Self {
                vec,
                _phantom: PhantomData,
            })
        } else {
            Err(Error::OutOfBounds {
                i: vec.len(),
                len: Self::max_len(),
            })
        }
    }

    /// Create an empty list.
    pub fn empty() -> Self {
        Self {
            vec: vec![],
            _phantom: PhantomData,
        }
    }

    /// Returns the number of values presently in `self`.
    pub fn len(&self) -> usize {
        self.vec.len()
    }

    /// True if `self` does not contain any values.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the type-level maximum length.
    pub fn max_len() -> usize {
        N::to_usize()
    }

    /// Appends `value` to the back of `self`.
    ///
    /// Returns `Err(Error::OutOfBounds)` when appending `value` would exceed the maximum length.
    pub fn push(&mut self, value: T) -> Result<(), Error> {
        if self.vec.len() < Self::max_len() {
            self.vec.push(value);
            Ok(())
        } else {
            Err(Error::OutOfBounds {
                i: self.vec.len() + 1,
                len: Self::max_len(),
            })
        }
    }
}
impl<T, N: Unsigned> From<Vec<T>> for VariableList<T, N> {
    /// Infallible conversion: silently truncates `vec` to at most `N` elements.
    fn from(mut vec: Vec<T>) -> Self {
        vec.truncate(N::to_usize());

        Self {
            vec,
            _phantom: PhantomData,
        }
    }
}

impl<T, N: Unsigned> From<VariableList<T, N>> for Vec<T> {
    /// Unwrap the backing `Vec`.
    fn from(list: VariableList<T, N>) -> Vec<T> {
        list.vec
    }
}

impl<T, N: Unsigned> Default for VariableList<T, N> {
    /// An empty list.
    fn default() -> Self {
        Self {
            vec: Vec::default(),
            _phantom: PhantomData,
        }
    }
}

// Indexing, slicing and iteration all delegate to the backing `Vec`/slice.
impl<T, N: Unsigned, I: SliceIndex<[T]>> Index<I> for VariableList<T, N> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&self.vec, index)
    }
}

impl<T, N: Unsigned, I: SliceIndex<[T]>> IndexMut<I> for VariableList<T, N> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut self.vec, index)
    }
}

impl<T, N: Unsigned> Deref for VariableList<T, N> {
    type Target = [T];

    fn deref(&self) -> &[T] {
        &self.vec[..]
    }
}

// Note: `DerefMut` allows modifying existing elements but cannot change the
// length, so the `<= N` invariant is preserved.
impl<T, N: Unsigned> DerefMut for VariableList<T, N> {
    fn deref_mut(&mut self) -> &mut [T] {
        &mut self.vec[..]
    }
}

impl<'a, T, N: Unsigned> IntoIterator for &'a VariableList<T, N> {
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<T, N: Unsigned> IntoIterator for VariableList<T, N> {
    type Item = T;
    type IntoIter = std::vec::IntoIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        self.vec.into_iter()
    }
}
impl<T, N: Unsigned> tree_hash::TreeHash for VariableList<T, N>
where
    T: tree_hash::TreeHash,
{
    fn tree_hash_type() -> tree_hash::TreeHashType {
        tree_hash::TreeHashType::List
    }

    fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding {
        unreachable!("List should never be packed.")
    }

    fn tree_hash_packing_factor() -> usize {
        unreachable!("List should never be packed.")
    }

    fn tree_hash_root(&self) -> Hash256 {
        // A list's root is the root of its elements mixed with its current length.
        let root = vec_tree_hash_root::<T, N>(&self.vec);

        tree_hash::mix_in_length(&root, self.len())
    }
}

// SSZ encoding delegates entirely to the backing `Vec`.
impl<T, N: Unsigned> ssz::Encode for VariableList<T, N>
where
    T: ssz::Encode,
{
    fn is_ssz_fixed_len() -> bool {
        <Vec<T>>::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        <Vec<T>>::ssz_fixed_len()
    }

    fn ssz_bytes_len(&self) -> usize {
        self.vec.ssz_bytes_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        self.vec.ssz_append(buf)
    }
}

impl<T, N> ssz::Decode for VariableList<T, N>
where
    T: ssz::Decode,
    N: Unsigned,
{
    fn is_ssz_fixed_len() -> bool {
        false
    }

    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> {
        let max_len = N::to_usize();

        if bytes.is_empty() {
            // An empty byte string decodes to an empty list.
            Ok(vec![].into())
        } else if T::is_ssz_fixed_len() {
            // Fixed-length items are tightly packed. `checked_div` yields `None`
            // iff `T::ssz_fixed_len()` is zero.
            let num_items = bytes
                .len()
                .checked_div(T::ssz_fixed_len())
                .ok_or(ssz::DecodeError::ZeroLengthItem)?;

            if num_items > max_len {
                return Err(ssz::DecodeError::BytesInvalid(format!(
                    "VariableList of {} items exceeds maximum of {}",
                    num_items, max_len
                )));
            }

            bytes
                .chunks(T::ssz_fixed_len())
                .try_fold(Vec::with_capacity(num_items), |mut vec, chunk| {
                    vec.push(T::from_ssz_bytes(chunk)?);
                    Ok(vec)
                })
                .map(Into::into)
        } else {
            // Variable-length items are offset-delimited; the helper enforces `max_len`.
            ssz::decode_list_of_variable_length_items(bytes, Some(max_len))
                .map(|vec: Vec<_>| vec.into())
        }
    }
}

#[cfg(feature = "arbitrary")]
impl<'a, T: arbitrary::Arbitrary<'a>, N: 'static + Unsigned> arbitrary::Arbitrary<'a>
    for VariableList<T, N>
{
    /// Generate a list of between 0 and `N` arbitrary elements.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let max_size = N::to_usize();
        let rand = usize::arbitrary(u)?;
        let size = std::cmp::min(rand, max_size);
        let mut vec: Vec<T> = Vec::with_capacity(size);
        for _ in 0..size {
            vec.push(<T>::arbitrary(u)?);
        }
        Self::new(vec).map_err(|_| arbitrary::Error::IncorrectFormat)
    }
}
// Unit tests covering construction, indexing, truncation, SSZ round-trips and
// tree hashing of `VariableList`.
#[cfg(test)]
mod test {
    use super::*;
    use ssz::*;
    use tree_hash::{merkle_root, TreeHash};
    use tree_hash_derive::TreeHash;
    use typenum::*;

    #[test]
    fn new() {
        // Longer than `N` is rejected; equal or shorter is accepted.
        let vec = vec![42; 5];
        let fixed: Result<VariableList<u64, U4>, _> = VariableList::new(vec);
        assert!(fixed.is_err());

        let vec = vec![42; 3];
        let fixed: Result<VariableList<u64, U4>, _> = VariableList::new(vec);
        assert!(fixed.is_ok());

        let vec = vec![42; 4];
        let fixed: Result<VariableList<u64, U4>, _> = VariableList::new(vec);
        assert!(fixed.is_ok());
    }

    #[test]
    fn indexing() {
        let vec = vec![1, 2];

        let mut fixed: VariableList<u64, U8192> = vec.clone().into();

        assert_eq!(fixed[0], 1);
        assert_eq!(&fixed[0..1], &vec[0..1]);
        assert_eq!((fixed[..]).len(), 2);

        fixed[1] = 3;
        assert_eq!(fixed[1], 3);
    }

    #[test]
    fn length() {
        // `From<Vec<_>>` truncates oversized input to `N` elements.
        let vec = vec![42; 5];
        let fixed: VariableList<u64, U4> = VariableList::from(vec.clone());
        assert_eq!(&fixed[..], &vec[0..4]);

        let vec = vec![42; 3];
        let fixed: VariableList<u64, U4> = VariableList::from(vec.clone());
        assert_eq!(&fixed[0..3], &vec[..]);
        assert_eq!(&fixed[..], &vec![42, 42, 42][..]);

        let vec = vec![];
        let fixed: VariableList<u64, U4> = VariableList::from(vec);
        assert_eq!(&fixed[..], &[] as &[u64]);
    }

    #[test]
    fn deref() {
        let vec = vec![0, 2, 4, 6];
        let fixed: VariableList<u64, U4> = VariableList::from(vec);

        assert_eq!(fixed.first(), Some(&0));
        assert_eq!(fixed.get(3), Some(&6));
        assert_eq!(fixed.get(4), None);
    }

    #[test]
    fn encode() {
        let vec: VariableList<u16, U2> = vec![0; 2].into();
        assert_eq!(vec.as_ssz_bytes(), vec![0, 0, 0, 0]);
        assert_eq!(<VariableList<u16, U2> as Encode>::ssz_fixed_len(), 4);
    }

    // Assert that SSZ encode -> decode returns the original value.
    fn round_trip<T: Encode + Decode + std::fmt::Debug + PartialEq>(item: T) {
        let encoded = &item.as_ssz_bytes();
        assert_eq!(item.ssz_bytes_len(), encoded.len());
        assert_eq!(T::from_ssz_bytes(encoded), Ok(item));
    }

    #[test]
    fn u16_len_8() {
        round_trip::<VariableList<u16, U8>>(vec![42; 8].into());
        round_trip::<VariableList<u16, U8>>(vec![0; 8].into());
    }

    // The expected root of a list: the Merkle root of the bytes, mixed with `len`.
    fn root_with_length(bytes: &[u8], len: usize) -> Hash256 {
        let root = merkle_root(bytes, 0);
        tree_hash::mix_in_length(&root, len)
    }

    #[test]
    fn tree_hash_u8() {
        let fixed: VariableList<u8, U0> = VariableList::from(vec![]);
        assert_eq!(fixed.tree_hash_root(), root_with_length(&[0; 8], 0));

        for i in 0..=1 {
            let fixed: VariableList<u8, U1> = VariableList::from(vec![0; i]);
            assert_eq!(fixed.tree_hash_root(), root_with_length(&vec![0; i], i));
        }

        for i in 0..=8 {
            let fixed: VariableList<u8, U8> = VariableList::from(vec![0; i]);
            assert_eq!(fixed.tree_hash_root(), root_with_length(&vec![0; i], i));
        }

        for i in 0..=13 {
            let fixed: VariableList<u8, U13> = VariableList::from(vec![0; i]);
            assert_eq!(fixed.tree_hash_root(), root_with_length(&vec![0; i], i));
        }

        for i in 0..=16 {
            let fixed: VariableList<u8, U16> = VariableList::from(vec![0; i]);
            assert_eq!(fixed.tree_hash_root(), root_with_length(&vec![0; i], i));
        }

        let source: Vec<u8> = (0..16).collect();
        let fixed: VariableList<u8, U16> = VariableList::from(source.clone());
        assert_eq!(fixed.tree_hash_root(), root_with_length(&source, 16));
    }

    // A simple composite type: each element occupies one whole leaf.
    #[derive(Clone, Copy, TreeHash, Default)]
    struct A {
        a: u32,
        b: u32,
    }

    // Concatenate `n` copies of `input`.
    fn repeat(input: &[u8], n: usize) -> Vec<u8> {
        let mut output = vec![];

        for _ in 0..n {
            output.append(&mut input.to_vec());
        }

        output
    }

    // Like `root_with_length` but pads the tree out to `min_nodes` leaves.
    fn padded_root_with_length(bytes: &[u8], len: usize, min_nodes: usize) -> Hash256 {
        let root = merkle_root(bytes, min_nodes);
        tree_hash::mix_in_length(&root, len)
    }

    #[test]
    fn tree_hash_composite() {
        let a = A { a: 0, b: 1 };

        let fixed: VariableList<A, U0> = VariableList::from(vec![]);
        assert_eq!(
            fixed.tree_hash_root(),
            padded_root_with_length(&[0; 32], 0, 0),
        );

        for i in 0..=1 {
            let fixed: VariableList<A, U1> = VariableList::from(vec![a; i]);
            assert_eq!(
                fixed.tree_hash_root(),
                padded_root_with_length(&repeat(a.tree_hash_root().as_bytes(), i), i, 1),
                "U1 {}",
                i
            );
        }

        for i in 0..=8 {
            let fixed: VariableList<A, U8> = VariableList::from(vec![a; i]);
            assert_eq!(
                fixed.tree_hash_root(),
                padded_root_with_length(&repeat(a.tree_hash_root().as_bytes(), i), i, 8),
                "U8 {}",
                i
            );
        }

        for i in 0..=13 {
            let fixed: VariableList<A, U13> = VariableList::from(vec![a; i]);
            assert_eq!(
                fixed.tree_hash_root(),
                padded_root_with_length(&repeat(a.tree_hash_root().as_bytes(), i), i, 13),
                "U13 {}",
                i
            );
        }

        for i in 0..=16 {
            let fixed: VariableList<A, U16> = VariableList::from(vec![a; i]);
            assert_eq!(
                fixed.tree_hash_root(),
                padded_root_with_length(&repeat(a.tree_hash_root().as_bytes(), i), i, 16),
                "U16 {}",
                i
            );
        }
    }
}

View File

@ -13,15 +13,15 @@ tokio = { version = "1.14.0", features = ["rt-multi-thread"] }
bls = { path = "../../crypto/bls" } bls = { path = "../../crypto/bls" }
integer-sqrt = "0.1.5" integer-sqrt = "0.1.5"
itertools = "0.10.0" itertools = "0.10.0"
eth2_ssz = "0.4.1" ethereum_ssz = "0.5.0"
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
eth2_ssz_types = "0.2.2" ssz_types = "0.5.0"
merkle_proof = { path = "../merkle_proof" } merkle_proof = { path = "../merkle_proof" }
safe_arith = { path = "../safe_arith" } safe_arith = { path = "../safe_arith" }
tree_hash = "0.4.1" tree_hash = "0.5.0"
types = { path = "../types", default-features = false } types = { path = "../types", default-features = false }
rayon = "1.4.1" rayon = "1.4.1"
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
int_to_bytes = { path = "../int_to_bytes" } int_to_bytes = { path = "../int_to_bytes" }
smallvec = "1.6.1" smallvec = "1.6.1"
arbitrary = { version = "1.0", features = ["derive"], optional = true } arbitrary = { version = "1.0", features = ["derive"], optional = true }
@ -39,7 +39,7 @@ arbitrary-fuzz = [
"types/arbitrary-fuzz", "types/arbitrary-fuzz",
"bls/arbitrary", "bls/arbitrary",
"merkle_proof/arbitrary", "merkle_proof/arbitrary",
"eth2_ssz/arbitrary", "ethereum_ssz/arbitrary",
"eth2_ssz_types/arbitrary", "ssz_types/arbitrary",
"tree_hash/arbitrary", "tree_hash/arbitrary",
] ]

View File

@ -1,4 +1,4 @@
use eth2_hashing::hash; use ethereum_hashing::hash;
use int_to_bytes::int_to_bytes32; use int_to_bytes::int_to_bytes32;
use merkle_proof::{MerkleTree, MerkleTreeError}; use merkle_proof::{MerkleTree, MerkleTreeError};
use safe_arith::SafeArith; use safe_arith::SafeArith;

View File

@ -1,7 +1,7 @@
use super::errors::{BlockOperationError, BlsExecutionChangeInvalid as Invalid}; use super::errors::{BlockOperationError, BlsExecutionChangeInvalid as Invalid};
use crate::per_block_processing::signature_sets::bls_execution_change_signature_set; use crate::per_block_processing::signature_sets::bls_execution_change_signature_set;
use crate::VerifySignatures; use crate::VerifySignatures;
use eth2_hashing::hash; use ethereum_hashing::hash;
use types::*; use types::*;
type Result<T> = std::result::Result<T, BlockOperationError<Invalid>>; type Result<T> = std::result::Result<T, BlockOperationError<Invalid>>;

View File

@ -12,7 +12,7 @@ harness = false
criterion = "0.3.3" criterion = "0.3.3"
[dependencies] [dependencies]
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
ethereum-types = "0.14.1" ethereum-types = "0.14.1"
[features] [features]

View File

@ -1,5 +1,5 @@
use crate::Hash256; use crate::Hash256;
use eth2_hashing::{Context, Sha256Context}; use ethereum_hashing::{Context, Sha256Context};
use std::cmp::max; use std::cmp::max;
/// Return `p(index)` in a pseudorandom permutation `p` of `0...list_size-1` with ``seed`` as entropy. /// Return `p(index)` in a pseudorandom permutation `p` of `0...list_size-1` with ``seed`` as entropy.

View File

@ -1,5 +1,5 @@
use crate::Hash256; use crate::Hash256;
use eth2_hashing::hash_fixed; use ethereum_hashing::hash_fixed;
use std::mem; use std::mem;
const SEED_SIZE: usize = 32; const SEED_SIZE: usize = 32;

View File

@ -1,23 +0,0 @@
[package]
name = "tree_hash"
version = "0.4.1"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2021"
license = "Apache-2.0"
description = "Efficient Merkle-hashing as used in Ethereum 2.0"
[dev-dependencies]
rand = "0.8.5"
tree_hash_derive = "0.4.0"
types = { path = "../types" }
beacon_chain = { path = "../../beacon_node/beacon_chain" }
eth2_ssz = "0.4.1"
eth2_ssz_derive = "0.3.1"
[dependencies]
ethereum-types = "0.14.1"
eth2_hashing = "0.3.0"
smallvec = "1.6.1"
[features]
arbitrary = ["ethereum-types/arbitrary"]

View File

@ -1,50 +0,0 @@
use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType};
use types::{BeaconState, EthSpec, MainnetEthSpec};
const TREE_HASH_LOOPS: usize = 1_000;
const VALIDATOR_COUNT: usize = 1_000;
/// Build an ephemeral test beacon chain with `VALIDATOR_COUNT` deterministic validators.
fn get_harness<T: EthSpec>() -> BeaconChainHarness<EphemeralHarnessType<T>> {
    let harness = BeaconChainHarness::builder(T::default())
        .default_spec()
        .deterministic_keypairs(VALIDATOR_COUNT)
        .fresh_ephemeral_store()
        .build();

    harness.advance_slot();

    harness
}

/// Clone the head state out of a fresh harness and sanity-check its contents
/// so the benchmark hashes a state of known shape.
fn build_state<T: EthSpec>() -> BeaconState<T> {
    let state = get_harness::<T>().chain.head_beacon_state_cloned();

    assert_eq!(state.as_base().unwrap().validators.len(), VALIDATOR_COUNT);
    assert_eq!(state.as_base().unwrap().balances.len(), VALIDATOR_COUNT);
    assert!(state
        .as_base()
        .unwrap()
        .previous_epoch_attestations
        .is_empty());
    assert!(state
        .as_base()
        .unwrap()
        .current_epoch_attestations
        .is_empty());
    assert!(state.as_base().unwrap().eth1_data_votes.is_empty());
    assert!(state.as_base().unwrap().historical_roots.is_empty());

    state
}

/// Repeatedly tree-hash the state so a profiler (e.g. flamegraph) can sample
/// the hashing code paths.
fn main() {
    let state = build_state::<MainnetEthSpec>();

    // This vec is an attempt to ensure the compiler doesn't optimize-out the hashing.
    let mut vec = Vec::with_capacity(TREE_HASH_LOOPS);

    for _ in 0..TREE_HASH_LOOPS {
        let root = state.canonical_root();
        vec.push(root[0]);
    }
}

View File

@ -1,222 +0,0 @@
use super::*;
use ethereum_types::{H160, H256, U128, U256};
/// Convert a `u64` into a `Hash256`: the value is written little-endian into
/// bytes 0..8 of an otherwise-zero 32-byte word.
fn int_to_hash256(int: u64) -> Hash256 {
    let mut bytes = [0u8; HASHSIZE];
    bytes[..8].copy_from_slice(&int.to_le_bytes());
    Hash256::from_slice(&bytes)
}
/// Implements `TreeHash` for an unsigned integer type: the packed encoding is the
/// little-endian bytes, and the root is those bytes in a zero-padded 32-byte chunk.
macro_rules! impl_for_bitsize {
    ($type: ident, $bit_size: expr) => {
        impl TreeHash for $type {
            fn tree_hash_type() -> TreeHashType {
                TreeHashType::Basic
            }

            fn tree_hash_packed_encoding(&self) -> PackedEncoding {
                PackedEncoding::from_slice(&self.to_le_bytes())
            }

            fn tree_hash_packing_factor() -> usize {
                // How many values fit in one 32-byte chunk.
                HASHSIZE / ($bit_size / 8)
            }

            #[allow(clippy::cast_lossless)] // Lint does not apply to all uses of this macro.
            fn tree_hash_root(&self) -> Hash256 {
                int_to_hash256(*self as u64)
            }
        }
    };
}

impl_for_bitsize!(u8, 8);
impl_for_bitsize!(u16, 16);
impl_for_bitsize!(u32, 32);
impl_for_bitsize!(u64, 64);
// `usize` is hashed as a 64-bit integer (assumes a target where usize <= 64 bits).
impl_for_bitsize!(usize, 64);
// `bool` hashes identically to a `u8` of value 0 or 1.
impl TreeHash for bool {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Basic
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        (*self as u8).tree_hash_packed_encoding()
    }

    fn tree_hash_packing_factor() -> usize {
        u8::tree_hash_packing_factor()
    }

    fn tree_hash_root(&self) -> Hash256 {
        int_to_hash256(*self as u64)
    }
}

/// Only valid for byte arrays of at most 32 bytes: the root is the bytes
/// right-padded with zeros into a single chunk.
macro_rules! impl_for_lt_32byte_u8_array {
    ($len: expr) => {
        impl TreeHash for [u8; $len] {
            fn tree_hash_type() -> TreeHashType {
                TreeHashType::Vector
            }

            fn tree_hash_packed_encoding(&self) -> PackedEncoding {
                unreachable!("bytesN should never be packed.")
            }

            fn tree_hash_packing_factor() -> usize {
                unreachable!("bytesN should never be packed.")
            }

            fn tree_hash_root(&self) -> Hash256 {
                let mut result = [0; 32];
                result[0..$len].copy_from_slice(&self[..]);
                Hash256::from_slice(&result)
            }
        }
    };
}

impl_for_lt_32byte_u8_array!(4);
impl_for_lt_32byte_u8_array!(32);

// `[u8; 48]` (e.g. a BLS pubkey) spans two chunks, so it is Merkleized rather
// than padded into a single chunk.
impl TreeHash for [u8; 48] {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Vector
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        unreachable!("Vector should never be packed.")
    }

    fn tree_hash_packing_factor() -> usize {
        unreachable!("Vector should never be packed.")
    }

    fn tree_hash_root(&self) -> Hash256 {
        let values_per_chunk = BYTES_PER_CHUNK;
        // ceil(48 / 32) = 2 chunks.
        let minimum_chunk_count = (48 + values_per_chunk - 1) / values_per_chunk;
        merkle_root(self, minimum_chunk_count)
    }
}
// `U128` is a basic type: 16 little-endian bytes, two values per 32-byte chunk.
impl TreeHash for U128 {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Basic
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        let mut result = [0; 16];
        self.to_little_endian(&mut result);
        PackedEncoding::from_slice(&result)
    }

    fn tree_hash_packing_factor() -> usize {
        2
    }

    fn tree_hash_root(&self) -> Hash256 {
        let mut result = [0; HASHSIZE];
        self.to_little_endian(&mut result[0..16]);
        Hash256::from_slice(&result)
    }
}

// `U256` is a basic type filling exactly one chunk (little-endian).
impl TreeHash for U256 {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Basic
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        let mut result = [0; 32];
        self.to_little_endian(&mut result);
        PackedEncoding::from_slice(&result)
    }

    fn tree_hash_packing_factor() -> usize {
        1
    }

    fn tree_hash_root(&self) -> Hash256 {
        let mut result = [0; 32];
        self.to_little_endian(&mut result[..]);
        Hash256::from_slice(&result)
    }
}

// 20-byte address: root is the bytes zero-padded to one chunk.
//
// NOTE(review): despite reporting `Vector`, this impl provides a packed encoding
// (padded to 32 bytes) instead of panicking like the other vector impls —
// presumably some caller relies on this; confirm before changing.
impl TreeHash for H160 {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Vector
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        let mut result = [0; 32];
        result[0..20].copy_from_slice(self.as_bytes());
        PackedEncoding::from_slice(&result)
    }

    fn tree_hash_packing_factor() -> usize {
        1
    }

    fn tree_hash_root(&self) -> Hash256 {
        let mut result = [0; 32];
        result[0..20].copy_from_slice(self.as_bytes());
        Hash256::from_slice(&result)
    }
}

// 32-byte hash: already exactly one chunk, so it is its own root.
impl TreeHash for H256 {
    fn tree_hash_type() -> TreeHashType {
        TreeHashType::Vector
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        PackedEncoding::from_slice(self.as_bytes())
    }

    fn tree_hash_packing_factor() -> usize {
        1
    }

    fn tree_hash_root(&self) -> Hash256 {
        *self
    }
}
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn bool() {
        // `true` hashes to 0x01 followed by 31 zero bytes; `false` to all zeros.
        let mut true_bytes: Vec<u8> = vec![1];
        true_bytes.append(&mut vec![0; 31]);

        let false_bytes: Vec<u8> = vec![0; 32];

        assert_eq!(true.tree_hash_root().as_bytes(), true_bytes.as_slice());
        assert_eq!(false.tree_hash_root().as_bytes(), false_bytes.as_slice());
    }

    #[test]
    fn int_to_bytes() {
        // Values occupy the low (leading) bytes, little-endian.
        assert_eq!(int_to_hash256(0).as_bytes(), &[0; 32]);
        assert_eq!(
            int_to_hash256(1).as_bytes(),
            &[
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0
            ]
        );
        assert_eq!(
            int_to_hash256(u64::max_value()).as_bytes(),
            &[
                255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0
            ]
        );
    }
}

View File

@ -1,208 +0,0 @@
pub mod impls;
mod merkle_hasher;
mod merkleize_padded;
mod merkleize_standard;
pub use merkle_hasher::{Error, MerkleHasher};
pub use merkleize_padded::merkleize_padded;
pub use merkleize_standard::merkleize_standard;
use eth2_hashing::{hash_fixed, ZERO_HASHES, ZERO_HASHES_MAX_INDEX};
use smallvec::SmallVec;
/// The number of bytes in one SSZ chunk/leaf.
pub const BYTES_PER_CHUNK: usize = 32;
/// The number of bytes in one hash digest.
pub const HASHSIZE: usize = 32;
/// The input size of one Merkle hash operation: two concatenated child chunks.
pub const MERKLE_HASH_CHUNK: usize = 2 * BYTES_PER_CHUNK;
/// The largest union selector accepted by `mix_in_selector`.
pub const MAX_UNION_SELECTOR: u8 = 127;
/// Stack capacity of `PackedEncoding` before it spills to the heap.
pub const SMALLVEC_SIZE: usize = 32;
pub type Hash256 = ethereum_types::H256;
/// The packed (possibly sub-chunk) encoding of a value, stored inline when small.
pub type PackedEncoding = SmallVec<[u8; SMALLVEC_SIZE]>;
/// Convenience method for `MerkleHasher` which also provides some fast-paths for small trees.
///
/// `minimum_leaf_count` will only be used if it is greater than or equal to the minimum number
/// of leaves that can be created from `bytes`.
pub fn merkle_root(bytes: &[u8], minimum_leaf_count: usize) -> Hash256 {
    // Number of 32-byte leaves required for `bytes` (rounded up), but never fewer than the
    // caller-requested minimum.
    let leaves = std::cmp::max(
        (bytes.len() + (HASHSIZE - 1)) / HASHSIZE,
        minimum_leaf_count,
    );

    match leaves {
        // No bytes at all: the root is defined to be zero.
        0 => Hash256::zero(),
        // One leaf: the root is that leaf, zero-padded to 32 bytes.
        1 => {
            let mut padded = [0; HASHSIZE];
            padded[..bytes.len()].copy_from_slice(bytes);
            Hash256::from_slice(&padded)
        }
        // Two leaves (common with BLS pubkeys): hash a single 64-byte block directly,
        // avoiding the `MerkleHasher` machinery.
        2 => {
            let mut block = [0; HASHSIZE * 2];
            block[..bytes.len()].copy_from_slice(bytes);
            Hash256::from_slice(&hash_fixed(&block))
        }
        // Three or more leaves: stream the bytes through `MerkleHasher`.
        _ => {
            let mut hasher = MerkleHasher::with_leaves(leaves);
            hasher
                .write(bytes)
                .expect("the number of leaves is adequate for the number of bytes");
            hasher
                .finish()
                .expect("the number of leaves is adequate for the number of bytes")
        }
    }
}
/// Returns the node created by hashing `root` and `length`.
///
/// Used in `TreeHash` for inserting the length of a list above its root.
pub fn mix_in_length(root: &Hash256, length: usize) -> Hash256 {
    // Little-endian encode `length` into the low bytes of a zeroed 32-byte chunk.
    let mut length_chunk = [0; BYTES_PER_CHUNK];
    length_chunk[..std::mem::size_of::<usize>()].copy_from_slice(&length.to_le_bytes());

    Hash256::from_slice(&eth2_hashing::hash32_concat(root.as_bytes(), &length_chunk)[..])
}
/// Returns `Some(root)` created by hashing `root` and `selector`, if `selector <=
/// MAX_UNION_SELECTOR`. Otherwise, returns `None`.
///
/// Used in `TreeHash` for the "union" type.
///
/// ## Specification
///
/// ```ignore,text
/// mix_in_selector: Given a Merkle root root and a type selector selector ("uint256" little-endian
/// serialization) return hash(root + selector).
/// ```
///
/// https://github.com/ethereum/consensus-specs/blob/v1.1.0-beta.3/ssz/simple-serialize.md#union
pub fn mix_in_selector(root: &Hash256, selector: u8) -> Option<Hash256> {
    if selector <= MAX_UNION_SELECTOR {
        // The selector occupies the first byte of an otherwise-zero chunk.
        let mut selector_chunk = [0; BYTES_PER_CHUNK];
        selector_chunk[0] = selector;

        let mixed = eth2_hashing::hash32_concat(root.as_bytes(), &selector_chunk);
        Some(Hash256::from_slice(&mixed))
    } else {
        None
    }
}
/// Returns a cached padding node for a given height.
fn get_zero_hash(height: usize) -> &'static [u8] {
    // Only heights within the pre-computed table can be served.
    assert!(
        height <= ZERO_HASHES_MAX_INDEX,
        "Tree exceeds MAX_TREE_DEPTH of {}",
        ZERO_HASHES_MAX_INDEX
    );
    &ZERO_HASHES[height]
}
/// The SSZ type classes, which determine how a value is Merkleized.
#[derive(Debug, PartialEq, Clone)]
pub enum TreeHashType {
/// A fixed-size primitive whose packed encoding may share a chunk with others.
Basic,
/// A fixed-length collection.
Vector,
/// A variable-length collection; its length is mixed into its root.
List,
/// A composite type with fields.
Container,
}
/// A type whose SSZ Merkle (tree hash) root can be computed.
pub trait TreeHash {
/// Returns the SSZ type class of `Self`.
fn tree_hash_type() -> TreeHashType;
/// Returns the packed (possibly sub-chunk) encoding of `self`.
/// Collection types in this file treat packing as unreachable.
fn tree_hash_packed_encoding(&self) -> PackedEncoding;
/// Returns how many packed encodings of `Self` fit in one chunk.
fn tree_hash_packing_factor() -> usize;
/// Returns the Merkle root of `self`.
fn tree_hash_root(&self) -> Hash256;
}
/// Punch through references: a `&T` hashes exactly as the `T` it points to.
impl<'a, T> TreeHash for &'a T
where
T: TreeHash,
{
fn tree_hash_type() -> TreeHashType {
T::tree_hash_type()
}
fn tree_hash_packed_encoding(&self) -> PackedEncoding {
T::tree_hash_packed_encoding(*self)
}
fn tree_hash_packing_factor() -> usize {
T::tree_hash_packing_factor()
}
fn tree_hash_root(&self) -> Hash256 {
T::tree_hash_root(*self)
}
}
/// Implements `tree_hash::TreeHash` for `$type` by treating its SSZ encoding as a vector.
///
/// NOTE(review): this macro looks stale — `tree_hash_root` here returns `Vec<u8>` rather than
/// `Hash256` (as the `TreeHash` trait in this crate requires), and it calls `merkle_root` with
/// one argument where the current signature takes two. It would not compile if invoked; confirm
/// whether it can be removed.
#[macro_export]
macro_rules! tree_hash_ssz_encoding_as_vector {
($type: ident) => {
impl tree_hash::TreeHash for $type {
fn tree_hash_type() -> tree_hash::TreeHashType {
tree_hash::TreeHashType::Vector
}
fn tree_hash_packed_encoding(&self) -> PackedEncoding {
unreachable!("Vector should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("Vector should never be packed.")
}
fn tree_hash_root(&self) -> Vec<u8> {
tree_hash::merkle_root(&ssz::ssz_encode(self))
}
}
};
}
/// Implements `tree_hash::TreeHash` for `$type` by treating its SSZ encoding as a list.
///
/// NOTE(review): this macro looks stale — `tree_hash_root` is declared to return `Vec<u8>`
/// rather than `Hash256` (as the `TreeHash` trait in this crate requires). Confirm whether it
/// can be removed.
#[macro_export]
macro_rules! tree_hash_ssz_encoding_as_list {
($type: ident) => {
impl tree_hash::TreeHash for $type {
fn tree_hash_type() -> tree_hash::TreeHashType {
tree_hash::TreeHashType::List
}
fn tree_hash_packed_encoding(&self) -> PackedEncoding {
unreachable!("List should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("List should never be packed.")
}
fn tree_hash_root(&self) -> Vec<u8> {
ssz::ssz_encode(self).tree_hash_root()
}
}
};
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn mix_length() {
// Expected value: hash(root_chunk || length_chunk), where the length (42) is
// little-endian encoded and zero-padded out to a full 32-byte chunk.
let hash = {
let mut preimage = vec![42; BYTES_PER_CHUNK];
preimage.append(&mut vec![42]);
preimage.append(&mut vec![0; BYTES_PER_CHUNK - 1]);
eth2_hashing::hash(&preimage)
};
assert_eq!(
mix_in_length(&Hash256::from_slice(&[42; BYTES_PER_CHUNK]), 42).as_bytes(),
&hash[..]
);
}
}

View File

@ -1,573 +0,0 @@
use crate::{get_zero_hash, Hash256, HASHSIZE};
use eth2_hashing::{Context, Sha256Context, HASH_LEN};
use smallvec::{smallvec, SmallVec};
use std::mem;
/// A `SmallVec` storing up to 8 elements inline before heap-allocating.
type SmallVec8<T> = SmallVec<[T; 8]>;
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
/// The maximum number of leaves defined by the initialization `depth` has been exceeded.
MaximumLeavesExceeded { max_leaves: usize },
}
/// Helper struct to store either a hash digest or a slice.
///
/// Should be used as a left or right value for some node.
enum Preimage<'a> {
/// An owned digest produced by a completed child node.
Digest([u8; HASH_LEN]),
/// A borrowed 32-byte value (e.g. a caller-supplied leaf or a cached zero-hash).
Slice(&'a [u8]),
}
impl<'a> Preimage<'a> {
/// Returns a 32-byte slice.
fn as_bytes(&self) -> &[u8] {
match self {
Preimage::Digest(digest) => digest.as_ref(),
Preimage::Slice(slice) => slice,
}
}
}
/// A node that has had a left child supplied, but not a right child.
struct HalfNode {
/// The hasher context, already updated with the left child's bytes.
context: Context,
/// The tree id of the node. The root node has an id of `1` and ids increase moving down the
/// tree from left to right.
id: usize,
}
impl HalfNode {
/// Create a new half-node from the given `left` value.
fn new(id: usize, left: Preimage) -> Self {
let mut context = Context::new();
context.update(left.as_bytes());
Self { context, id }
}
/// Complete the half-node by providing a `right` value. Returns a digest of the left and right
/// nodes.
fn finish(mut self, right: Preimage) -> [u8; HASH_LEN] {
self.context.update(right.as_bytes());
self.context.finalize()
}
}
/// Provides a Merkle-root hasher that allows for streaming bytes (i.e., providing any-length byte
/// slices without need to separate into leaves). Efficiently handles cases where not all leaves
/// have been provided by assuming all non-provided leaves are `[0; 32]` and pre-computing the
/// zero-value hashes at all depths of the tree.
///
/// This algorithm aims to allocate as little memory as possible and it does this by "folding" up
/// the tree as each leaf is provided. Consider this step-by-step functional diagram of hashing a
/// tree with depth three:
///
/// ## Functional Diagram
///
/// Nodes that are `-` have not been defined and do not occupy memory. Nodes that are `L` are
/// leaves that are provided but are not stored. Nodes that have integers (`1`, `2`) are stored in
/// our struct. Finally, nodes that are `X` were stored, but are now removed.
///
/// ### Start
///
/// ```ignore
/// -
/// / \
/// - -
/// / \ / \
/// - - - -
/// ```
///
/// ### Provide first leaf
///
/// ```ignore
/// -
/// / \
/// 2 -
/// / \ / \
/// L - - -
/// ```
///
/// ### Provide second leaf
///
/// ```ignore
/// 1
/// / \
/// X -
/// / \ / \
/// L L - -
/// ```
///
/// ### Provide third leaf
///
/// ```ignore
/// 1
/// / \
/// X 3
/// / \ / \
/// L L L -
/// ```
///
/// ### Provide fourth and final leaf
///
/// ```ignore
/// 1
/// / \
/// X X
/// / \ / \
/// L L L L
/// ```
///
pub struct MerkleHasher {
/// Stores the nodes that are half-complete and awaiting a right node.
///
/// A smallvec of size 8 means we can hash a tree with 256 leaves without allocating on the
/// heap. Each half-node is 232 bytes, so this smallvec may store 1856 bytes on the stack.
///
/// NOTE(review): the `context_size` test asserts `size_of::<HalfNode>() == 224` — confirm
/// which figure (224 or 232) is current.
half_nodes: SmallVec8<HalfNode>,
/// The depth of the tree that will be produced.
///
/// Depth is counted top-down (i.e., the root node is at depth 0). A tree with 1 leaf has a
/// depth of 1, a tree with 4 leaves has a depth of 3.
depth: usize,
/// The next leaf that we are expecting to process.
next_leaf: usize,
/// A buffer of bytes that are waiting to be written to a leaf.
buffer: SmallVec<[u8; 32]>,
/// Set to Some(root) when the root of the tree is known.
root: Option<Hash256>,
}
/// Returns the id of the parent of the node with id `i`.
fn get_parent(i: usize) -> usize {
    // Integer halving: children `2k` and `2k + 1` both map to parent `k`.
    i >> 1
}
/// Gets the depth of a node with an id of `i`.
///
/// It is a logic error to provide `i == 0`.
///
/// E.g., if `i` is 1, depth is 0. If `i` is 2, depth is 1.
fn get_depth(i: usize) -> usize {
// The depth is floor(log2(i)): the bit-width of `usize` minus the leading zeros, minus one.
let total_bits = mem::size_of::<usize>() * 8;
total_bits - i.leading_zeros() as usize - 1
}
impl MerkleHasher {
/// Instantiate a hasher for a tree with a given number of leaves.
///
/// `num_leaves` will be rounded to the next power of two. E.g., if `num_leaves == 6`, then the
/// tree will _actually_ be able to accomodate 8 leaves and the resulting hasher is exactly the
/// same as one that was instantiated with `Self::with_leaves(8)`.
///
/// ## Notes
///
/// If `num_leaves == 0`, a tree of depth 1 will be created. If no leaves are provided it will
/// return a root of `[0; 32]`.
pub fn with_leaves(num_leaves: usize) -> Self {
    // Round the leaf count up to a full binary tree, then convert it to a depth:
    // a tree with 2^(d - 1) leaves has depth d.
    let padded_leaf_count = num_leaves.next_power_of_two();
    Self::with_depth(get_depth(padded_leaf_count) + 1)
}
/// Instantiates a new, empty hasher for a tree with `depth` layers which will have capacity
/// for `1 << (depth - 1)` leaf nodes.
///
/// It is not possible to grow the depth of the tree after instantiation.
///
/// ## Panics
///
/// Panics if `depth == 0`.
fn with_depth(depth: usize) -> Self {
assert!(depth > 0, "merkle tree cannot have a depth of zero");
Self {
half_nodes: SmallVec::with_capacity(depth - 1),
depth,
// Leaf ids start at 2^(depth - 1); we expect the first leaf next.
next_leaf: 1 << (depth - 1),
buffer: SmallVec::with_capacity(32),
root: None,
}
}
/// Write some bytes to the hasher.
///
/// ## Errors
///
/// Returns an error if the given bytes would create a leaf that would exceed the maximum
/// permissible number of leaves defined by the initialization `depth`. E.g., a tree of `depth
/// == 2` can only accept 2 leaves. A tree of `depth == 14` can only accept 8,192 leaves.
pub fn write(&mut self, bytes: &[u8]) -> Result<(), Error> {
let mut ptr = 0;
while ptr <= bytes.len() {
// Take up to one chunk's worth of the remaining input.
let slice = &bytes[ptr..std::cmp::min(bytes.len(), ptr + HASHSIZE)];
// Fast path: nothing buffered and a full chunk available — hash it directly.
if self.buffer.is_empty() && slice.len() == HASHSIZE {
self.process_leaf(slice)?;
ptr += HASHSIZE
// Buffered + new bytes still do not fill a chunk — stash them for later.
} else if self.buffer.len() + slice.len() < HASHSIZE {
self.buffer.extend_from_slice(slice);
ptr += HASHSIZE
} else {
// Combine the buffered bytes with just enough new bytes to fill one chunk.
let buf_len = self.buffer.len();
let required = HASHSIZE - buf_len;
let mut leaf = [0; HASHSIZE];
leaf[..buf_len].copy_from_slice(&self.buffer);
leaf[buf_len..].copy_from_slice(&slice[0..required]);
self.process_leaf(&leaf)?;
self.buffer = smallvec![];
// Only the bytes actually consumed advance the pointer in this branch.
ptr += required
}
}
Ok(())
}
/// Process the next leaf in the tree.
///
/// ## Errors
///
/// Returns an error if the given leaf would exceed the maximum permissible number of leaves
/// defined by the initialization `depth`. E.g., a tree of `depth == 2` can only accept 2
/// leaves. A tree of `depth == 14` can only accept 8,192 leaves.
fn process_leaf(&mut self, leaf: &[u8]) -> Result<(), Error> {
assert_eq!(leaf.len(), HASHSIZE, "a leaf must be 32 bytes");
// NOTE(review): this bound is 2^(depth + 1), which is looser than the 2^(depth - 1)
// leaves a tree of this depth can actually hold — confirm whether that is intended.
let max_leaves = 1 << (self.depth + 1);
if self.next_leaf > max_leaves {
return Err(Error::MaximumLeavesExceeded { max_leaves });
} else if self.next_leaf == 1 {
// A tree of depth one has a root that is equal to the first given leaf.
self.root = Some(Hash256::from_slice(leaf))
} else if self.next_leaf % 2 == 0 {
// Even ids are left children; odd ids are right children.
self.process_left_node(self.next_leaf, Preimage::Slice(leaf))
} else {
self.process_right_node(self.next_leaf, Preimage::Slice(leaf))
}
self.next_leaf += 1;
Ok(())
}
/// Returns the root of the Merkle tree.
///
/// If not all leaves have been provided, the tree will be efficiently completed under the
/// assumption that all not-yet-provided leaves are equal to `[0; 32]`.
///
/// ## Errors
///
/// Returns an error if the bytes remaining in the buffer would create a leaf that would exceed
/// the maximum permissible number of leaves defined by the initialization `depth`.
pub fn finish(mut self) -> Result<Hash256, Error> {
// Flush any partial leaf left in the buffer, zero-padded to a full chunk.
if !self.buffer.is_empty() {
let mut leaf = [0; HASHSIZE];
leaf[..self.buffer.len()].copy_from_slice(&self.buffer);
self.process_leaf(&leaf)?
}
// If the tree is incomplete, we must complete it by providing zero-hashes.
loop {
if let Some(root) = self.root {
break Ok(root);
} else if let Some(node) = self.half_nodes.last() {
// The deepest half-node is waiting on its right sibling; supply the
// pre-computed zero-hash for that position.
let right_child = node.id * 2 + 1;
self.process_right_node(right_child, self.zero_hash(right_child));
} else if self.next_leaf == 1 {
// The next_leaf can only be 1 if the tree has a depth of one. If no leaves
// have been supplied, assume a root of zero.
break Ok(Hash256::zero());
} else {
// The only scenario where there are (a) no half nodes and (b) a tree of depth
// two or more is where no leaves have been supplied at all.
//
// Once we supply this first zero-hash leaf then all future operations will be
// triggered via the `process_right_node` branch.
self.process_left_node(self.next_leaf, self.zero_hash(self.next_leaf))
}
}
}
/// Process a node that will become the left-hand node of some parent. The supplied `id` is
/// that of the node (not the parent). The `preimage` is the value of the node (i.e., if this
/// is a leaf node it will be the value of that leaf).
///
/// In this scenario, the only option is to push a new half-node.
fn process_left_node(&mut self, id: usize, preimage: Preimage) {
// A left child can never complete its parent; stash it awaiting the right sibling.
self.half_nodes
.push(HalfNode::new(get_parent(id), preimage))
}
/// Process a node that will become the right-hand node of some parent. The supplied `id` is
/// that of the node (not the parent). The `preimage` is the value of the node (i.e., if this
/// is a leaf node it will be the value of that leaf).
///
/// This operation will always complete one node, then it will attempt to crawl up the tree and
/// collapse all other completed nodes. For example, consider a tree of depth 3 (see diagram
/// below). When providing the node with id `7`, the node with id `3` will be completed which
/// will also provide the right-node for the `1` node. This function will complete both of
/// those nodes and ultimately find the root of the tree.
///
/// ```ignore
/// 1 <-- completed
/// / \
/// 2 3 <-- completed
/// / \ / \
/// 4 5 6 7 <-- supplied right node
/// ```
fn process_right_node(&mut self, id: usize, mut preimage: Preimage) {
let mut parent = get_parent(id);
loop {
match self.half_nodes.last() {
// The half-node on top of the stack is this node's parent: complete it, then
// keep folding upwards for as long as each completed digest finishes another
// waiting parent.
Some(node) if node.id == parent => {
preimage = Preimage::Digest(
self.half_nodes
.pop()
.expect("if .last() is Some then .pop() must succeed")
.finish(preimage),
);
if parent == 1 {
// Node id 1 is the root; the fold is complete.
self.root = Some(Hash256::from_slice(preimage.as_bytes()));
break;
} else {
parent = get_parent(parent);
}
}
// No waiting parent: store the completed digest as a new half-node.
_ => {
self.half_nodes.push(HalfNode::new(parent, preimage));
break;
}
}
}
}
/// Returns a "zero hash" from a pre-computed set for the given node.
///
/// Note: this node is not always zero, instead it is the result of hashing up a tree where the
/// leaves are all zeros. E.g., in a tree of depth 2, the `zero_hash` of a node at depth 1
/// will be `[0; 32]`. However, the `zero_hash` for a node at depth 0 will be
/// `hash(concat([0; 32], [0; 32])))`.
fn zero_hash(&self, id: usize) -> Preimage<'static> {
    // Convert the node's absolute depth into its height above the leaf layer, then look up
    // the pre-computed hash of an all-zero subtree of that height.
    let height = self.depth - (get_depth(id) + 1);
    Preimage::Slice(get_zero_hash(height))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::merkleize_padded;
/// This test is just to ensure that the stack size of the `Context` remains the same. We choose
/// our smallvec size based upon this, so it's good to know if it suddenly changes in size.
#[test]
fn context_size() {
// NOTE(review): the struct-level comment on `half_nodes` says each `HalfNode` is 232
// bytes, while this asserts 224 — confirm which figure is current.
assert_eq!(
mem::size_of::<HalfNode>(),
224,
"Halfnode size should be as expected"
);
}
fn compare_with_reference(leaves: &[Hash256], depth: usize) {
let reference_bytes = leaves
.iter()
.flat_map(|hash| hash.as_bytes())
.copied()
.collect::<Vec<_>>();
let reference_root = merkleize_padded(&reference_bytes, 1 << (depth - 1));
let merklizer_root_32_bytes = {
let mut m = MerkleHasher::with_depth(depth);
for leaf in leaves.iter() {
m.write(leaf.as_bytes()).expect("should process leaf");
}
m.finish().expect("should finish")
};
assert_eq!(
reference_root, merklizer_root_32_bytes,
"32 bytes should match reference root"
);
let merklizer_root_individual_3_bytes = {
let mut m = MerkleHasher::with_depth(depth);
for bytes in reference_bytes.chunks(3) {
m.write(bytes).expect("should process byte");
}
m.finish().expect("should finish")
};
assert_eq!(
reference_root, merklizer_root_individual_3_bytes,
"3 bytes should match reference root"
);
let merklizer_root_individual_single_bytes = {
let mut m = MerkleHasher::with_depth(depth);
for byte in reference_bytes.iter() {
m.write(&[*byte]).expect("should process byte");
}
m.finish().expect("should finish")
};
assert_eq!(
reference_root, merklizer_root_individual_single_bytes,
"single bytes should match reference root"
);
}
/// A simple wrapper to compare MerkleHasher to the reference function by just giving a number
/// of leaves and a depth.
fn compare_reference_with_len(leaves: u64, depth: usize) {
let leaves = (0..leaves)
.map(Hash256::from_low_u64_be)
.collect::<Vec<_>>();
compare_with_reference(&leaves, depth)
}
/// Compares the `MerkleHasher::with_depth` and `MerkleHasher::with_leaves` generate consistent
/// results.
fn compare_new_with_leaf_count(num_leaves: u64, depth: usize) {
let leaves = (0..num_leaves)
.map(Hash256::from_low_u64_be)
.collect::<Vec<_>>();
let from_depth = {
let mut m = MerkleHasher::with_depth(depth);
for leaf in leaves.iter() {
m.write(leaf.as_bytes()).expect("should process leaf");
}
m.finish()
};
let from_num_leaves = {
let mut m = MerkleHasher::with_leaves(num_leaves as usize);
for leaf in leaves.iter() {
m.process_leaf(leaf.as_bytes())
.expect("should process leaf");
}
m.finish()
};
assert_eq!(
from_depth, from_num_leaves,
"hash generated by depth should match that from num leaves"
);
}
#[test]
fn with_leaves() {
compare_new_with_leaf_count(1, 1);
compare_new_with_leaf_count(2, 2);
compare_new_with_leaf_count(3, 3);
compare_new_with_leaf_count(4, 3);
compare_new_with_leaf_count(5, 4);
compare_new_with_leaf_count(6, 4);
compare_new_with_leaf_count(7, 4);
compare_new_with_leaf_count(8, 4);
compare_new_with_leaf_count(9, 5);
compare_new_with_leaf_count(10, 5);
compare_new_with_leaf_count(11, 5);
compare_new_with_leaf_count(12, 5);
compare_new_with_leaf_count(13, 5);
compare_new_with_leaf_count(14, 5);
compare_new_with_leaf_count(15, 5);
}
#[test]
fn depth() {
assert_eq!(get_depth(1), 0);
assert_eq!(get_depth(2), 1);
assert_eq!(get_depth(3), 1);
assert_eq!(get_depth(4), 2);
assert_eq!(get_depth(5), 2);
assert_eq!(get_depth(6), 2);
assert_eq!(get_depth(7), 2);
assert_eq!(get_depth(8), 3);
}
#[test]
fn with_0_leaves() {
let hasher = MerkleHasher::with_leaves(0);
assert_eq!(hasher.finish().unwrap(), Hash256::zero());
}
#[test]
#[should_panic]
fn too_many_leaves() {
compare_reference_with_len(2, 1);
}
#[test]
fn full_trees() {
compare_reference_with_len(1, 1);
compare_reference_with_len(2, 2);
compare_reference_with_len(4, 3);
compare_reference_with_len(8, 4);
compare_reference_with_len(16, 5);
compare_reference_with_len(32, 6);
compare_reference_with_len(64, 7);
compare_reference_with_len(128, 8);
compare_reference_with_len(256, 9);
compare_reference_with_len(256, 9);
compare_reference_with_len(8192, 14);
}
#[test]
fn incomplete_trees() {
compare_reference_with_len(0, 1);
compare_reference_with_len(0, 2);
compare_reference_with_len(1, 2);
for i in 0..=4 {
compare_reference_with_len(i, 3);
}
for i in 0..=7 {
compare_reference_with_len(i, 4);
}
for i in 0..=15 {
compare_reference_with_len(i, 5);
}
for i in 0..=32 {
compare_reference_with_len(i, 6);
}
for i in 0..=64 {
compare_reference_with_len(i, 7);
}
compare_reference_with_len(0, 14);
compare_reference_with_len(13, 14);
compare_reference_with_len(8191, 14);
}
#[test]
fn remaining_buffer() {
let a = {
let mut m = MerkleHasher::with_leaves(2);
m.write(&[1]).expect("should write");
m.finish().expect("should finish")
};
let b = {
let mut m = MerkleHasher::with_leaves(2);
let mut leaf = vec![1];
leaf.extend_from_slice(&[0; 31]);
m.write(&leaf).expect("should write");
m.write(&[0; 32]).expect("should write");
m.finish().expect("should finish")
};
assert_eq!(a, b, "should complete buffer");
}
}

View File

@ -1,330 +0,0 @@
use super::{get_zero_hash, Hash256, BYTES_PER_CHUNK};
use eth2_hashing::{hash32_concat, hash_fixed};
/// Merkleize `bytes` and return the root, optionally padding the tree out to `min_leaves` number of
/// leaves.
///
/// **Note**: This function is generally worse than using the `crate::merkle_root` which uses
/// `MerkleHasher`. We only keep this function around for reference testing.
///
/// First all nodes are extracted from `bytes` and then a padding node is added until the number of
/// leaf chunks is greater than or equal to `min_leaves`. Callers may set `min_leaves` to `0` if no
/// adding additional chunks should be added to the given `bytes`.
///
/// If `bytes.len() <= BYTES_PER_CHUNK`, no hashing is done and `bytes` is returned, potentially
/// padded out to `BYTES_PER_CHUNK` length with `0`.
///
/// ## CPU Performance
///
/// A cache of `MAX_TREE_DEPTH` hashes are stored to avoid re-computing the hashes of padding nodes
/// (or their parents). Therefore, adding padding nodes only incurs one more hash per additional
/// height of the tree.
///
/// ## Memory Performance
///
/// This algorithm has two interesting memory usage properties:
///
/// 1. The maximum memory footprint is roughly `O(V / 2)` memory, where `V` is the number of leaf
/// chunks with values (i.e., leaves that are not padding). The means adding padding nodes to
/// the tree does not increase the memory footprint.
/// 2. At each height of the tree half of the memory is freed until only a single chunk is stored.
/// 3. The input `bytes` are not copied into another list before processing.
///
/// _Note: there are some minor memory overheads, including a handful of usizes and a list of
/// `MAX_TREE_DEPTH` hashes as `lazy_static` constants._
pub fn merkleize_padded(bytes: &[u8], min_leaves: usize) -> Hash256 {
// If the bytes are just one chunk or less, pad to one chunk and return without hashing.
if bytes.len() <= BYTES_PER_CHUNK && min_leaves <= 1 {
let mut o = bytes.to_vec();
o.resize(BYTES_PER_CHUNK, 0);
return Hash256::from_slice(&o);
}
assert!(
bytes.len() > BYTES_PER_CHUNK || min_leaves > 1,
"Merkle hashing only needs to happen if there is more than one chunk"
);
// The number of leaves that can be made directly from `bytes`.
let leaves_with_values = (bytes.len() + (BYTES_PER_CHUNK - 1)) / BYTES_PER_CHUNK;
// The number of parents that have at least one non-padding leaf.
//
// Since there is more than one node in this tree (see prior assertion), there should always be
// one or more initial parent nodes.
let initial_parents_with_values = std::cmp::max(1, next_even_number(leaves_with_values) / 2);
// The number of leaves in the full tree (including padding nodes).
let num_leaves = std::cmp::max(leaves_with_values, min_leaves).next_power_of_two();
// The number of levels in the tree.
//
// A tree with a single node has `height == 1`.
let height = num_leaves.trailing_zeros() as usize + 1;
assert!(height >= 2, "The tree should have two or more heights");
// A buffer/scratch-space used for storing each round of hashes at each height.
//
// This buffer is kept as small as possible; it will shrink so it never stores a padding node.
let mut chunks = ChunkStore::with_capacity(initial_parents_with_values);
// Create a parent in the `chunks` buffer for every two chunks in `bytes`.
//
// I.e., do the first round of hashing, hashing from the `bytes` slice and filling the `chunks`
// struct.
for i in 0..initial_parents_with_values {
let start = i * BYTES_PER_CHUNK * 2;
// Hash two chunks, creating a parent chunk.
let hash = match bytes.get(start..start + BYTES_PER_CHUNK * 2) {
// All bytes are available, hash as usual.
Some(slice) => hash_fixed(slice),
// Unable to get all the bytes, get a small slice and pad it out.
None => {
let mut preimage = bytes
.get(start..)
.expect("`i` can only be larger than zero if there are bytes to read")
.to_vec();
preimage.resize(BYTES_PER_CHUNK * 2, 0);
hash_fixed(&preimage)
}
};
assert_eq!(
hash.len(),
BYTES_PER_CHUNK,
"Hashes should be exactly one chunk"
);
// Store the parent node.
chunks
.set(i, &hash)
.expect("Buffer should always have capacity for parent nodes")
}
// Iterate through all heights above the leaf nodes and either (a) hash two children or, (b)
// hash a left child and a right padding node.
//
// Skip the 0'th height because the leaves have already been processed. Skip the highest-height
// in the tree as it is the root does not require hashing.
//
// The padding nodes for each height are cached via `lazy static` to simulate non-adjacent
// padding nodes (i.e., avoid doing unnecessary hashing).
for height in 1..height - 1 {
let child_nodes = chunks.len();
let parent_nodes = next_even_number(child_nodes) / 2;
// For each pair of nodes stored in `chunks`:
//
// - If two nodes are available, hash them to form a parent.
// - If one node is available, hash it and a cached padding node to form a parent.
for i in 0..parent_nodes {
let (left, right) = match (chunks.get(i * 2), chunks.get(i * 2 + 1)) {
(Ok(left), Ok(right)) => (left, right),
(Ok(left), Err(_)) => (left, get_zero_hash(height)),
// Deriving `parent_nodes` from `chunks.len()` has ensured that we never encounter the
// scenario where we expect two nodes but there are none.
(Err(_), Err(_)) => unreachable!("Parent must have one child"),
// `chunks` is a contiguous array so it is impossible for an index to be missing
// when a higher index is present.
(Err(_), Ok(_)) => unreachable!("Parent must have a left child"),
};
assert!(
left.len() == right.len() && right.len() == BYTES_PER_CHUNK,
"Both children should be `BYTES_PER_CHUNK` bytes."
);
let hash = hash32_concat(left, right);
// Store a parent node.
chunks
.set(i, &hash)
.expect("Buf is adequate size for parent");
}
// Shrink the buffer so it neatly fits the number of new nodes created in this round.
//
// The number of `parent_nodes` is either decreasing or stable. It never increases.
chunks.truncate(parent_nodes);
}
// There should be a single chunk left in the buffer and it is the Merkle root.
let root = chunks.into_vec();
assert_eq!(root.len(), BYTES_PER_CHUNK, "Only one chunk should remain");
Hash256::from_slice(&root)
}
/// A helper struct for storing words of `BYTES_PER_CHUNK` size in a flat byte array.
#[derive(Debug)]
struct ChunkStore(Vec<u8>);

impl ChunkStore {
    /// Creates a new instance holding `chunks` zeroed chunks.
    fn with_capacity(chunks: usize) -> Self {
        Self(vec![0; chunks * BYTES_PER_CHUNK])
    }

    /// Set the `i`th chunk to `value`.
    ///
    /// Returns `Err` if `value.len() != BYTES_PER_CHUNK` or `i` is out-of-bounds.
    fn set(&mut self, i: usize, value: &[u8]) -> Result<(), ()> {
        if value.len() != BYTES_PER_CHUNK {
            return Err(());
        }
        let start = i * BYTES_PER_CHUNK;
        // `get_mut` returns `None` when the range is out-of-bounds.
        match self.0.get_mut(start..start + BYTES_PER_CHUNK) {
            Some(dest) => {
                dest.copy_from_slice(value);
                Ok(())
            }
            None => Err(()),
        }
    }

    /// Gets the `i`th chunk.
    ///
    /// Returns `Err` if `i` is out-of-bounds.
    fn get(&self, i: usize) -> Result<&[u8], ()> {
        let start = i * BYTES_PER_CHUNK;
        self.0.get(start..start + BYTES_PER_CHUNK).ok_or(())
    }

    /// Returns the number of chunks presently stored in `self`.
    fn len(&self) -> usize {
        self.0.len() / BYTES_PER_CHUNK
    }

    /// Truncates 'self' to `num_chunks` chunks.
    ///
    /// Functionally identical to `Vec::truncate`.
    fn truncate(&mut self, num_chunks: usize) {
        self.0.truncate(num_chunks * BYTES_PER_CHUNK)
    }

    /// Consumes `self`, returning the underlying byte array.
    fn into_vec(self) -> Vec<u8> {
        self.0
    }
}
/// Returns the next even number following `n`. If `n` is even, `n` is returned.
fn next_even_number(n: usize) -> usize {
    if n % 2 == 0 {
        n
    } else {
        n + 1
    }
}
#[cfg(test)]
mod test {
use super::*;
use crate::ZERO_HASHES_MAX_INDEX;
pub fn reference_root(bytes: &[u8]) -> Hash256 {
crate::merkleize_standard(bytes)
}
macro_rules! common_tests {
($get_bytes: ident) => {
#[test]
fn zero_value_0_nodes() {
test_against_reference(&$get_bytes(0 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_1_nodes() {
test_against_reference(&$get_bytes(1 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_2_nodes() {
test_against_reference(&$get_bytes(2 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_3_nodes() {
test_against_reference(&$get_bytes(3 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_4_nodes() {
test_against_reference(&$get_bytes(4 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_8_nodes() {
test_against_reference(&$get_bytes(8 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_9_nodes() {
test_against_reference(&$get_bytes(9 * BYTES_PER_CHUNK), 0);
}
#[test]
fn zero_value_8_nodes_varying_min_length() {
for i in 0..64 {
test_against_reference(&$get_bytes(8 * BYTES_PER_CHUNK), i);
}
}
#[test]
fn zero_value_range_of_nodes() {
for i in 0..32 * BYTES_PER_CHUNK {
test_against_reference(&$get_bytes(i), 0);
}
}
#[test]
fn max_tree_depth_min_nodes() {
let input = vec![0; 10 * BYTES_PER_CHUNK];
let min_nodes = 2usize.pow(ZERO_HASHES_MAX_INDEX as u32);
assert_eq!(
merkleize_padded(&input, min_nodes).as_bytes(),
get_zero_hash(ZERO_HASHES_MAX_INDEX)
);
}
};
}
mod zero_value {
use super::*;
fn zero_bytes(bytes: usize) -> Vec<u8> {
vec![0; bytes]
}
common_tests!(zero_bytes);
}
mod random_value {
    use super::*;
    use rand::RngCore;

    /// Returns `bytes` random bytes.
    ///
    /// Bug fix: this previously built the buffer with `Vec::with_capacity(bytes)`, which
    /// produces a zero-length vec, so `fill_bytes` received an empty slice and every
    /// "random" test actually ran on empty input. The vec must be initialised to the
    /// requested length before being filled.
    fn random_bytes(bytes: usize) -> Vec<u8> {
        let mut bytes = vec![0; bytes];
        rand::thread_rng().fill_bytes(&mut bytes);
        bytes
    }

    common_tests!(random_bytes);
}
fn test_against_reference(input: &[u8], min_nodes: usize) {
let mut reference_input = input.to_vec();
reference_input.resize(
std::cmp::max(
reference_input.len(),
min_nodes.next_power_of_two() * BYTES_PER_CHUNK,
),
0,
);
assert_eq!(
reference_root(&reference_input),
merkleize_padded(input, min_nodes),
"input.len(): {:?}",
input.len()
);
}
}

View File

@ -1,81 +0,0 @@
use super::*;
use eth2_hashing::hash;
/// Merkleizes bytes and returns the root, using a simple algorithm that does not optimize to avoid
/// processing or storing padding bytes.
///
/// **Note**: This function is generally worse than using the `crate::merkle_root` which uses
/// `MerkleHasher`. We only keep this function around for reference testing.
///
/// The input `bytes` will be padded to ensure that the number of leaves is a power-of-two.
///
/// ## CPU Performance
///
/// Will hash all nodes in the tree, even if they are padding and pre-determined.
///
/// ## Memory Performance
///
/// - Duplicates the input `bytes`.
/// - Stores all internal nodes, even if they are padding.
/// - Does not free up unused memory during operation.
pub fn merkleize_standard(bytes: &[u8]) -> Hash256 {
// If the bytes are just one chunk (or less than one chunk) just return them.
if bytes.len() <= HASHSIZE {
let mut o = bytes.to_vec();
o.resize(HASHSIZE, 0);
return Hash256::from_slice(&o[0..HASHSIZE]);
}
let leaves = num_sanitized_leaves(bytes.len());
let nodes = num_nodes(leaves);
let internal_nodes = nodes - leaves;
let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();
// Lay the tree out flat: `internal_nodes` zeroed chunks followed by the leaf bytes.
let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];
o.append(&mut bytes.to_vec());
assert_eq!(o.len(), num_bytes);
let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNK]);
// Walk backwards over the flat tree: `i` reads pairs of children, `j` writes their parent.
let mut i = nodes * HASHSIZE;
let mut j = internal_nodes * HASHSIZE;
while i >= MERKLE_HASH_CHUNK {
i -= MERKLE_HASH_CHUNK;
j -= HASHSIZE;
let hash = match o.get(i..i + MERKLE_HASH_CHUNK) {
// All bytes are available, hash as usual.
Some(slice) => hash(slice),
// Unable to get all the bytes.
None => {
match o.get(i..) {
// Able to get some of the bytes, pad them out.
Some(slice) => {
let mut bytes = slice.to_vec();
bytes.resize(MERKLE_HASH_CHUNK, 0);
hash(&bytes)
}
// Unable to get any bytes, use the empty-chunk hash.
None => empty_chunk_hash.clone(),
}
}
};
o[j..j + HASHSIZE].copy_from_slice(&hash);
}
Hash256::from_slice(&o[0..HASHSIZE])
}
/// Number of `HASHSIZE`-byte leaf chunks needed to hold `num_bytes`, rounded up
/// to the next power of two.
fn num_sanitized_leaves(num_bytes: usize) -> usize {
    ((num_bytes + HASHSIZE - 1) / HASHSIZE).next_power_of_two()
}
/// Total node count of a perfect binary tree with `num_leaves` leaves.
fn num_nodes(num_leaves: usize) -> usize {
    num_leaves + num_leaves - 1
}

View File

@ -1,128 +0,0 @@
use ssz_derive::Encode;
use tree_hash::{Hash256, MerkleHasher, PackedEncoding, TreeHash, BYTES_PER_CHUNK};
use tree_hash_derive::TreeHash;
// Thin wrapper around `Vec<u8>` used to exercise list-style tree hashing.
#[derive(Encode)]
struct HashVec {
vec: Vec<u8>,
}
impl From<Vec<u8>> for HashVec {
fn from(vec: Vec<u8>) -> Self {
Self { vec }
}
}
impl tree_hash::TreeHash for HashVec {
    fn tree_hash_type() -> tree_hash::TreeHashType {
        tree_hash::TreeHashType::List
    }

    fn tree_hash_packed_encoding(&self) -> PackedEncoding {
        unreachable!("List should never be packed.")
    }

    fn tree_hash_packing_factor() -> usize {
        unreachable!("List should never be packed.")
    }

    /// Hash as an SSZ list: merkleize the packed bytes, then mix in the length.
    fn tree_hash_root(&self) -> Hash256 {
        let num_leaves = (self.vec.len() + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;
        let mut hasher = MerkleHasher::with_leaves(num_leaves);

        for byte in &self.vec {
            hasher.write(&byte.tree_hash_packed_encoding()).unwrap()
        }

        let root = hasher.finish().unwrap();
        tree_hash::mix_in_length(&root, self.vec.len())
    }
}
/// Hash `a` together with a 32-byte chunk whose first byte is `selector`.
fn mix_in_selector(a: Hash256, selector: u8) -> Hash256 {
    let mut selector_chunk = [0; 32];
    selector_chunk[0] = selector;
    let digest = eth2_hashing::hash32_concat(a.as_bytes(), &selector_chunk);
    Hash256::from_slice(&digest)
}
/// Hash two 32-byte chunks whose first bytes are `v1` and `v2` (rest zero).
fn u8_hash_concat(v1: u8, v2: u8) -> Hash256 {
    let mut left = [0; 32];
    let mut right = [0; 32];
    left[0] = v1;
    right[0] = v2;
    Hash256::from_slice(&eth2_hashing::hash32_concat(&left, &right))
}
/// A `Hash256` whose first byte is `x` and whose remaining bytes are zero.
fn u8_hash(x: u8) -> Hash256 {
    let mut chunk = [0; 32];
    chunk[0] = x;
    Hash256::from_slice(&chunk)
}
// Fixed-size enum hashed "transparently": the variant wrapper is ignored and
// the inner value is hashed directly.
#[derive(TreeHash)]
#[tree_hash(enum_behaviour = "transparent")]
enum FixedTrans {
A(u8),
B(u8),
}
// Both variants hash to the bare inner value; no selector is mixed in.
#[test]
fn fixed_trans() {
assert_eq!(FixedTrans::A(2).tree_hash_root(), u8_hash(2));
assert_eq!(FixedTrans::B(2).tree_hash_root(), u8_hash(2));
}
// Fixed-size enum hashed as an SSZ "union": the inner root is hashed together
// with the variant's selector (0 for `A`, 1 for `B`, by declaration order).
#[derive(TreeHash)]
#[tree_hash(enum_behaviour = "union")]
enum FixedUnion {
A(u8),
B(u8),
}
#[test]
fn fixed_union() {
assert_eq!(FixedUnion::A(2).tree_hash_root(), u8_hash_concat(2, 0));
assert_eq!(FixedUnion::B(2).tree_hash_root(), u8_hash_concat(2, 1));
}
// Variable-size (list-containing) enum hashed "transparently".
#[derive(TreeHash)]
#[tree_hash(enum_behaviour = "transparent")]
enum VariableTrans {
A(HashVec),
B(HashVec),
}
// The expected root is the inner `HashVec` root: for `vec![2]` that is the
// data chunk (first byte 2) hashed with the length chunk (length 1), i.e.
// `u8_hash_concat(2, 1)`. No union selector is added.
#[test]
fn variable_trans() {
assert_eq!(
VariableTrans::A(HashVec::from(vec![2])).tree_hash_root(),
u8_hash_concat(2, 1)
);
assert_eq!(
VariableTrans::B(HashVec::from(vec![2])).tree_hash_root(),
u8_hash_concat(2, 1)
);
}
// Variable-size (list-containing) enum hashed as an SSZ "union".
#[derive(TreeHash)]
#[tree_hash(enum_behaviour = "union")]
enum VariableUnion {
A(HashVec),
B(HashVec),
}
// Expected root: the inner `HashVec` root (`u8_hash_concat(2, 1)`, see
// `variable_trans` above for the derivation) mixed with the variant selector.
#[test]
fn variable_union() {
assert_eq!(
VariableUnion::A(HashVec::from(vec![2])).tree_hash_root(),
mix_in_selector(u8_hash_concat(2, 1), 0)
);
assert_eq!(
VariableUnion::B(HashVec::from(vec![2])).tree_hash_root(),
mix_in_selector(u8_hash_concat(2, 1), 1)
);
}

View File

@ -1,15 +0,0 @@
[package]
name = "tree_hash_derive"
version = "0.4.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2021"
description = "Procedural derive macros to accompany the tree_hash crate."
license = "Apache-2.0"
[lib]
proc-macro = true
[dependencies]
syn = "1.0.42"
quote = "1.0.7"
darling = "0.13.0"

View File

@ -1,336 +0,0 @@
use darling::FromDeriveInput;
use proc_macro::TokenStream;
use quote::quote;
use std::convert::TryInto;
use syn::{parse_macro_input, Attribute, DataEnum, DataStruct, DeriveInput, Meta};
/// The highest possible union selector value (higher values are reserved for backwards compatible
/// extensions).
const MAX_UNION_SELECTOR: u8 = 127;
// Options parsed from the `#[tree_hash(...)]` derive attribute via `darling`.
#[derive(Debug, FromDeriveInput)]
#[darling(attributes(tree_hash))]
struct StructOpts {
// `enum_behaviour = "transparent" | "union"`; only meaningful on enums.
#[darling(default)]
enum_behaviour: Option<String>,
}
// Legal values for the `enum_behaviour` attribute.
const ENUM_TRANSPARENT: &str = "transparent";
const ENUM_UNION: &str = "union";
const ENUM_VARIANTS: &[&str] = &[ENUM_TRANSPARENT, ENUM_UNION];
// Panic message used when an enum omits `enum_behaviour` entirely.
const NO_ENUM_BEHAVIOUR_ERROR: &str = "enums require an \"enum_behaviour\" attribute, \
e.g., #[tree_hash(enum_behaviour = \"transparent\")]";
/// How an enum should be tree-hashed: transparently (wrapper ignored) or as an
/// SSZ union (selector mixed in).
enum EnumBehaviour {
    Transparent,
    Union,
}

impl EnumBehaviour {
    /// Parse the optional `enum_behaviour` attribute value, panicking on any
    /// string other than "transparent" or "union".
    pub fn new(s: Option<String>) -> Option<Self> {
        let s = s?;
        match s.as_str() {
            ENUM_TRANSPARENT => Some(EnumBehaviour::Transparent),
            ENUM_UNION => Some(EnumBehaviour::Union),
            other => panic!(
                "{} is an invalid enum_behaviour, use either {:?}",
                other, ENUM_VARIANTS
            ),
        }
    }
}
/// Return a Vec of `syn::Ident` for each named field in the struct, whilst filtering out fields
/// that should not be hashed.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
/// Return a Vec of `syn::Ident` for each named field in the struct, with
/// `#[tree_hash(skip_hashing)]` fields filtered out.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
fn get_hashable_fields(struct_data: &syn::DataStruct) -> Vec<&syn::Ident> {
    let mut idents = Vec::new();
    for (ident, _, _) in get_hashable_fields_and_their_caches(struct_data) {
        idents.push(ident);
    }
    idents
}
/// Return a Vec of the hashable fields of a struct, and each field's type and optional cache field.
/// Return each hashable struct field together with its type and its optional
/// cached-tree-hash cache field (from `#[cached_tree_hash(...)]`).
fn get_hashable_fields_and_their_caches(
    struct_data: &syn::DataStruct,
) -> Vec<(&syn::Ident, syn::Type, Option<syn::Ident>)> {
    let mut fields = Vec::new();
    for field in struct_data.fields.iter() {
        // Fields marked `#[tree_hash(skip_hashing)]` are excluded entirely.
        if should_skip_hashing(field) {
            continue;
        }
        let ident = field
            .ident
            .as_ref()
            .expect("tree_hash_derive only supports named struct fields");
        fields.push((ident, field.ty.clone(), get_cache_field_for(field)));
    }
    fields
}
/// Parse the cached_tree_hash attribute for a field.
///
/// Extract the cache field name from `#[cached_tree_hash(cache_field_name)]`
///
/// Return `Some(cache_field_name)` if the field has a cached tree hash attribute,
/// or `None` otherwise.
fn get_cache_field_for(field: &syn::Field) -> Option<syn::Ident> {
use syn::{MetaList, NestedMeta};
// All `#[cached_tree_hash(...)]` attributes on this field, parsed to `Meta`.
let parsed_attrs = cached_tree_hash_attr_metas(&field.attrs);
// Require exactly one list-style attribute; take the first bare path inside
// it as the cache field name, e.g. `foo` in `#[cached_tree_hash(foo)]`.
if let [Meta::List(MetaList { nested, .. })] = &parsed_attrs[..] {
nested.iter().find_map(|x| match x {
NestedMeta::Meta(Meta::Path(path)) => path.get_ident().cloned(),
_ => None,
})
} else {
None
}
}
/// Process the `cached_tree_hash` attributes from a list of attributes into structured `Meta`s.
/// Collect every `#[cached_tree_hash(...)]` attribute that parses to a `Meta`,
/// silently dropping any that fail to parse.
fn cached_tree_hash_attr_metas(attrs: &[Attribute]) -> Vec<Meta> {
    attrs
        .iter()
        .filter(|attr| attr.path.is_ident("cached_tree_hash"))
        .filter_map(|attr| attr.parse_meta().ok())
        .collect()
}
/// Returns true if some field has an attribute declaring it should not be hashed.
///
/// The field attribute is: `#[tree_hash(skip_hashing)]`
/// Returns true if the field carries the `#[tree_hash(skip_hashing)]` attribute.
fn should_skip_hashing(field: &syn::Field) -> bool {
    field.attrs.iter().any(|attr| {
        if !attr.path.is_ident("tree_hash") {
            return false;
        }
        // Compare token text with whitespace stripped, so spacing variations
        // of `(skip_hashing)` all match.
        attr.tokens.to_string().replace(' ', "") == "(skip_hashing)"
    })
}
/// Implements `tree_hash::TreeHash` for some `struct`.
///
/// Fields are hashed in the order they are defined.
#[proc_macro_derive(TreeHash, attributes(tree_hash))]
pub fn tree_hash_derive(input: TokenStream) -> TokenStream {
let item = parse_macro_input!(input as DeriveInput);
// Parse `#[tree_hash(...)]` options via darling.
let opts = StructOpts::from_derive_input(&item).unwrap();
let enum_opt = EnumBehaviour::new(opts.enum_behaviour);
match &item.data {
syn::Data::Struct(s) => {
// `enum_behaviour` only makes sense on enums.
if enum_opt.is_some() {
panic!("enum_behaviour is invalid for structs");
}
tree_hash_derive_struct(&item, s)
}
// Enums must declare an `enum_behaviour`; dispatch on it.
syn::Data::Enum(s) => match enum_opt.expect(NO_ENUM_BEHAVIOUR_ERROR) {
EnumBehaviour::Transparent => tree_hash_derive_enum_transparent(&item, s),
EnumBehaviour::Union => tree_hash_derive_enum_union(&item, s),
},
_ => panic!("tree_hash_derive only supports structs and enums."),
}
}
/// Generate the `TreeHash` impl for a struct: each non-skipped field's root
/// becomes one leaf of a container merkleization, in declaration order.
fn tree_hash_derive_struct(item: &DeriveInput, struct_data: &DataStruct) -> TokenStream {
let name = &item.ident;
let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();
let idents = get_hashable_fields(struct_data);
// One merkle leaf per hashed field.
let num_leaves = idents.len();
let output = quote! {
impl #impl_generics tree_hash::TreeHash for #name #ty_generics #where_clause {
fn tree_hash_type() -> tree_hash::TreeHashType {
tree_hash::TreeHashType::Container
}
fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding {
unreachable!("Struct should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("Struct should never be packed.")
}
fn tree_hash_root(&self) -> tree_hash::Hash256 {
let mut hasher = tree_hash::MerkleHasher::with_leaves(#num_leaves);
#(
hasher.write(self.#idents.tree_hash_root().as_bytes())
.expect("tree hash derive should not apply too many leaves");
)*
hasher.finish().expect("tree hash derive should not have a remaining buffer")
}
}
};
output.into()
}
/// Derive `TreeHash` for an enum in the "transparent" method.
///
/// The "transparent" method is distinct from the "union" method specified in the SSZ specification.
/// When using "transparent", the enum will be ignored and the contained field will be hashed as if
/// the enum does not exist.
///
/// ## Limitations
///
/// Only supports:
/// - Enums with a single field per variant, where
/// - All fields are "container" types.
///
/// ## Panics
///
/// Will panic at compile-time if the single field requirement isn't met, but will panic *at run
/// time* if the container type requirement isn't met.
fn tree_hash_derive_enum_transparent(
    derive_input: &DeriveInput,
    enum_data: &DataEnum,
) -> TokenStream {
    let name = &derive_input.ident;
    let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();

    // For each variant, build a match pattern binding the single inner field as
    // `inner`, plus an expression yielding that field type's `TreeHashType`
    // (asserted to be `Container` at runtime in the generated code below).
    let (patterns, type_exprs): (Vec<_>, Vec<_>) = enum_data
        .variants
        .iter()
        .map(|variant| {
            let variant_name = &variant.ident;

            if variant.fields.len() != 1 {
                panic!("TreeHash can only be derived for enums with 1 field per variant");
            }

            let pattern = quote! {
                #name::#variant_name(ref inner)
            };

            // Idiomatic `iter()` rather than `(&fields).into_iter()`.
            let ty = &variant.fields.iter().next().unwrap().ty;
            let type_expr = quote! {
                <#ty as tree_hash::TreeHash>::tree_hash_type()
            };
            (pattern, type_expr)
        })
        .unzip();

    let output = quote! {
        impl #impl_generics tree_hash::TreeHash for #name #ty_generics #where_clause {
            fn tree_hash_type() -> tree_hash::TreeHashType {
                #(
                    assert_eq!(
                        #type_exprs,
                        tree_hash::TreeHashType::Container,
                        "all variants must be of container type"
                    );
                )*
                tree_hash::TreeHashType::Container
            }

            fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding {
                unreachable!("Enum should never be packed")
            }

            fn tree_hash_packing_factor() -> usize {
                unreachable!("Enum should never be packed")
            }

            fn tree_hash_root(&self) -> tree_hash::Hash256 {
                match self {
                    #(
                        #patterns => inner.tree_hash_root(),
                    )*
                }
            }
        }
    };
    output.into()
}
/// Derive `TreeHash` for an `enum` following the "union" SSZ spec.
///
/// The union selector will be determined based upon the order in which the enum variants are
/// defined. E.g., the top-most variant in the enum will have a selector of `0`, the variant
/// beneath it will have a selector of `1` and so on.
///
/// # Limitations
///
/// Only supports enums where each variant has a single field.
fn tree_hash_derive_enum_union(derive_input: &DeriveInput, enum_data: &DataEnum) -> TokenStream {
let name = &derive_input.ident;
let (impl_generics, ty_generics, where_clause) = &derive_input.generics.split_for_impl();
// One match pattern per variant, binding the single inner field as `inner`.
let patterns: Vec<_> = enum_data
.variants
.iter()
.map(|variant| {
let variant_name = &variant.ident;
// Union hashing needs exactly one field to mix with the selector.
if variant.fields.len() != 1 {
panic!("TreeHash can only be derived for enums with 1 field per variant");
}
quote! {
#name::#variant_name(ref inner)
}
})
.collect();
// Selectors follow declaration order: 0, 1, 2, ... (bounds-checked).
let union_selectors = compute_union_selectors(patterns.len());
let output = quote! {
impl #impl_generics tree_hash::TreeHash for #name #ty_generics #where_clause {
fn tree_hash_type() -> tree_hash::TreeHashType {
tree_hash::TreeHashType::Container
}
fn tree_hash_packed_encoding(&self) -> tree_hash::PackedEncoding {
unreachable!("Enum should never be packed")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("Enum should never be packed")
}
fn tree_hash_root(&self) -> tree_hash::Hash256 {
match self {
#(
#patterns => {
let root = inner.tree_hash_root();
let selector = #union_selectors;
tree_hash::mix_in_selector(&root, selector)
.expect("derive macro should prevent out-of-bounds selectors")
},
)*
}
}
}
};
output.into()
}
/// Assign a `u8` union selector to each variant (0, 1, 2, ... in declaration
/// order), panicking if the union is empty or the highest selector exceeds
/// `MAX_UNION_SELECTOR`.
fn compute_union_selectors(num_variants: usize) -> Vec<u8> {
    let mut union_selectors = Vec::with_capacity(num_variants);
    for i in 0..num_variants {
        let selector: u8 = i
            .try_into()
            .expect("union selector exceeds u8::max_value, union has too many variants");
        union_selectors.push(selector);
    }

    let highest_selector = union_selectors
        .last()
        .copied()
        .expect("0-variant union is not permitted");

    assert!(
        highest_selector <= MAX_UNION_SELECTOR,
        "union selector {} exceeds limit of {}, enum has too many variants",
        highest_selector,
        MAX_UNION_SELECTOR
    );

    union_selectors
}

View File

@ -15,7 +15,7 @@ compare_fields = { path = "../../common/compare_fields" }
compare_fields_derive = { path = "../../common/compare_fields_derive" } compare_fields_derive = { path = "../../common/compare_fields_derive" }
eth2_interop_keypairs = { path = "../../common/eth2_interop_keypairs" } eth2_interop_keypairs = { path = "../../common/eth2_interop_keypairs" }
ethereum-types = { version = "0.14.1", features = ["arbitrary"] } ethereum-types = { version = "0.14.1", features = ["arbitrary"] }
eth2_hashing = "0.3.0" ethereum_hashing = "1.0.0-beta.2"
hex = "0.4.2" hex = "0.4.2"
int_to_bytes = { path = "../int_to_bytes" } int_to_bytes = { path = "../int_to_bytes" }
log = "0.4.11" log = "0.4.11"
@ -25,13 +25,13 @@ safe_arith = { path = "../safe_arith" }
serde = {version = "1.0.116" , features = ["rc"] } serde = {version = "1.0.116" , features = ["rc"] }
serde_derive = "1.0.116" serde_derive = "1.0.116"
slog = "2.5.2" slog = "2.5.2"
eth2_ssz = { version = "0.4.1", features = ["arbitrary"] } ethereum_ssz = { version = "0.5.0", features = ["arbitrary"] }
eth2_ssz_derive = "0.3.1" ethereum_ssz_derive = "0.5.0"
eth2_ssz_types = { version = "0.2.2", features = ["arbitrary"] } ssz_types = { version = "0.5.0", features = ["arbitrary"] }
swap_or_not_shuffle = { path = "../swap_or_not_shuffle", features = ["arbitrary"] } swap_or_not_shuffle = { path = "../swap_or_not_shuffle", features = ["arbitrary"] }
test_random_derive = { path = "../../common/test_random_derive" } test_random_derive = { path = "../../common/test_random_derive" }
tree_hash = { version = "0.4.1", features = ["arbitrary"] } tree_hash = { version = "0.5.0", features = ["arbitrary"] }
tree_hash_derive = "0.4.0" tree_hash_derive = "0.5.0"
rand_xorshift = "0.3.0" rand_xorshift = "0.3.0"
cached_tree_hash = { path = "../cached_tree_hash" } cached_tree_hash = { path = "../cached_tree_hash" }
serde_yaml = "0.8.13" serde_yaml = "0.8.13"
@ -41,7 +41,7 @@ rusqlite = { version = "0.28.0", features = ["bundled"], optional = true }
# The arbitrary dependency is enabled by default since Capella to avoid complexity introduced by # The arbitrary dependency is enabled by default since Capella to avoid complexity introduced by
# `AbstractExecPayload` # `AbstractExecPayload`
arbitrary = { version = "1.0", features = ["derive"] } arbitrary = { version = "1.0", features = ["derive"] }
eth2_serde_utils = "0.1.1" ethereum_serde_utils = "0.5.0"
regex = "1.5.5" regex = "1.5.5"
lazy_static = "1.4.0" lazy_static = "1.4.0"
parking_lot = "0.12.0" parking_lot = "0.12.0"

Some files were not shown because too many files have changed in this diff Show More